Posted to commits@hbase.apache.org by gi...@apache.org on 2018/04/20 14:46:42 UTC

[01/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 12c47ed27 -> 1facf1d3a


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/testdevapidocs/src-html/org/apache/hadoop/hbase/wal/TestDisabledWAL.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/wal/TestDisabledWAL.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/wal/TestDisabledWAL.html
index 6cb193a..1ce94ea 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/wal/TestDisabledWAL.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/wal/TestDisabledWAL.html
@@ -30,92 +30,93 @@
 <span class="sourceLineNo">022</span><a name="line.22"></a>
 <span class="sourceLineNo">023</span>import java.io.IOException;<a name="line.23"></a>
 <span class="sourceLineNo">024</span><a name="line.24"></a>
-<span class="sourceLineNo">025</span>import org.apache.commons.logging.Log;<a name="line.25"></a>
-<span class="sourceLineNo">026</span>import org.apache.commons.logging.LogFactory;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import org.apache.hadoop.conf.Configuration;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.HBaseClassTestRule;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.HBaseTestingUtility;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.TableName;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.client.Get;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.client.Put;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.client.Result;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.client.Table;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.testclassification.MediumTests;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.testclassification.RegionServerTests;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.junit.After;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.junit.AfterClass;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.junit.Before;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.junit.BeforeClass;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.junit.ClassRule;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.junit.Rule;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.junit.Test;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.junit.experimental.categories.Category;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.junit.rules.TestName;<a name="line.46"></a>
-<span class="sourceLineNo">047</span><a name="line.47"></a>
-<span class="sourceLineNo">048</span>@Category({ RegionServerTests.class, MediumTests.class })<a name="line.48"></a>
-<span class="sourceLineNo">049</span>public class TestDisabledWAL {<a name="line.49"></a>
-<span class="sourceLineNo">050</span><a name="line.50"></a>
-<span class="sourceLineNo">051</span>  @ClassRule<a name="line.51"></a>
-<span class="sourceLineNo">052</span>  public static final HBaseClassTestRule CLASS_RULE =<a name="line.52"></a>
-<span class="sourceLineNo">053</span>      HBaseClassTestRule.forClass(TestDisabledWAL.class);<a name="line.53"></a>
-<span class="sourceLineNo">054</span><a name="line.54"></a>
-<span class="sourceLineNo">055</span>  @Rule<a name="line.55"></a>
-<span class="sourceLineNo">056</span>  public TestName name = new TestName();<a name="line.56"></a>
-<span class="sourceLineNo">057</span><a name="line.57"></a>
-<span class="sourceLineNo">058</span>  private static final Log LOG = LogFactory.getLog(TestDisabledWAL.class);<a name="line.58"></a>
-<span class="sourceLineNo">059</span>  static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();<a name="line.59"></a>
-<span class="sourceLineNo">060</span>  private Table table;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>  private TableName tableName;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>  private byte[] fam = Bytes.toBytes("f1");<a name="line.62"></a>
-<span class="sourceLineNo">063</span><a name="line.63"></a>
-<span class="sourceLineNo">064</span>  @BeforeClass<a name="line.64"></a>
-<span class="sourceLineNo">065</span>  public static void beforeClass() throws Exception {<a name="line.65"></a>
-<span class="sourceLineNo">066</span>    Configuration conf = TEST_UTIL.getConfiguration();<a name="line.66"></a>
-<span class="sourceLineNo">067</span>    conf.setBoolean("hbase.regionserver.hlog.enabled", false);<a name="line.67"></a>
-<span class="sourceLineNo">068</span>    try {<a name="line.68"></a>
-<span class="sourceLineNo">069</span>      TEST_UTIL.startMiniCluster();<a name="line.69"></a>
-<span class="sourceLineNo">070</span>    } catch (RuntimeException | IOException e) {<a name="line.70"></a>
-<span class="sourceLineNo">071</span>      LOG.error("Master failed to start.", e);<a name="line.71"></a>
-<span class="sourceLineNo">072</span>      fail("Failed to start cluster. Reason being: " + e.getCause().getMessage());<a name="line.72"></a>
-<span class="sourceLineNo">073</span>    }<a name="line.73"></a>
-<span class="sourceLineNo">074</span>  }<a name="line.74"></a>
-<span class="sourceLineNo">075</span><a name="line.75"></a>
-<span class="sourceLineNo">076</span>  @AfterClass<a name="line.76"></a>
-<span class="sourceLineNo">077</span>  public static void afterClass() throws Exception {<a name="line.77"></a>
-<span class="sourceLineNo">078</span>    TEST_UTIL.shutdownMiniCluster();<a name="line.78"></a>
-<span class="sourceLineNo">079</span>  }<a name="line.79"></a>
-<span class="sourceLineNo">080</span><a name="line.80"></a>
-<span class="sourceLineNo">081</span>  @Before<a name="line.81"></a>
-<span class="sourceLineNo">082</span>  public void setup() throws Exception {<a name="line.82"></a>
-<span class="sourceLineNo">083</span>    tableName = TableName.valueOf(name.getMethodName().replaceAll("[^a-zA-Z0-9]", "_"));<a name="line.83"></a>
-<span class="sourceLineNo">084</span>    LOG.info("Creating table " + tableName);<a name="line.84"></a>
-<span class="sourceLineNo">085</span>    table = TEST_UTIL.createTable(tableName, fam);<a name="line.85"></a>
-<span class="sourceLineNo">086</span>  }<a name="line.86"></a>
-<span class="sourceLineNo">087</span><a name="line.87"></a>
-<span class="sourceLineNo">088</span>  @After<a name="line.88"></a>
-<span class="sourceLineNo">089</span>  public void cleanup() throws Exception {<a name="line.89"></a>
-<span class="sourceLineNo">090</span>    LOG.info("Deleting table " + tableName);<a name="line.90"></a>
-<span class="sourceLineNo">091</span>    TEST_UTIL.deleteTable(tableName);<a name="line.91"></a>
-<span class="sourceLineNo">092</span>  }<a name="line.92"></a>
-<span class="sourceLineNo">093</span><a name="line.93"></a>
-<span class="sourceLineNo">094</span>  @Test<a name="line.94"></a>
-<span class="sourceLineNo">095</span>  public void testDisabledWAL() throws Exception {<a name="line.95"></a>
-<span class="sourceLineNo">096</span>    LOG.info("Writing data to table " + tableName);<a name="line.96"></a>
-<span class="sourceLineNo">097</span>    Put p = new Put(Bytes.toBytes("row"));<a name="line.97"></a>
-<span class="sourceLineNo">098</span>    p.addColumn(fam, Bytes.toBytes("qual"), Bytes.toBytes("val"));<a name="line.98"></a>
-<span class="sourceLineNo">099</span>    table.put(p);<a name="line.99"></a>
-<span class="sourceLineNo">100</span><a name="line.100"></a>
-<span class="sourceLineNo">101</span>    LOG.info("Flushing table " + tableName);<a name="line.101"></a>
-<span class="sourceLineNo">102</span>    TEST_UTIL.flush(tableName);<a name="line.102"></a>
-<span class="sourceLineNo">103</span><a name="line.103"></a>
-<span class="sourceLineNo">104</span>    LOG.info("Getting data from table " + tableName);<a name="line.104"></a>
-<span class="sourceLineNo">105</span>    Get get = new Get(Bytes.toBytes("row"));<a name="line.105"></a>
-<span class="sourceLineNo">106</span><a name="line.106"></a>
-<span class="sourceLineNo">107</span>    Result result = table.get(get);<a name="line.107"></a>
-<span class="sourceLineNo">108</span>    assertNotNull(result.getValue(fam, Bytes.toBytes("qual")));<a name="line.108"></a>
-<span class="sourceLineNo">109</span>  }<a name="line.109"></a>
-<span class="sourceLineNo">110</span>}<a name="line.110"></a>
+<span class="sourceLineNo">025</span>import org.apache.hadoop.conf.Configuration;<a name="line.25"></a>
+<span class="sourceLineNo">026</span>import org.apache.hadoop.hbase.HBaseClassTestRule;<a name="line.26"></a>
+<span class="sourceLineNo">027</span>import org.apache.hadoop.hbase.HBaseTestingUtility;<a name="line.27"></a>
+<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.TableName;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.client.Get;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.client.Put;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.client.Result;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.client.Table;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.testclassification.MediumTests;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.testclassification.RegionServerTests;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.junit.After;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.junit.AfterClass;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.junit.Before;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.junit.BeforeClass;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.junit.ClassRule;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.junit.Rule;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.junit.Test;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.junit.experimental.categories.Category;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.junit.rules.TestName;<a name="line.44"></a>
+<span class="sourceLineNo">045</span><a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.slf4j.Logger;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.slf4j.LoggerFactory;<a name="line.47"></a>
+<span class="sourceLineNo">048</span><a name="line.48"></a>
+<span class="sourceLineNo">049</span>@Category({ RegionServerTests.class, MediumTests.class })<a name="line.49"></a>
+<span class="sourceLineNo">050</span>public class TestDisabledWAL {<a name="line.50"></a>
+<span class="sourceLineNo">051</span><a name="line.51"></a>
+<span class="sourceLineNo">052</span>  @ClassRule<a name="line.52"></a>
+<span class="sourceLineNo">053</span>  public static final HBaseClassTestRule CLASS_RULE =<a name="line.53"></a>
+<span class="sourceLineNo">054</span>      HBaseClassTestRule.forClass(TestDisabledWAL.class);<a name="line.54"></a>
+<span class="sourceLineNo">055</span><a name="line.55"></a>
+<span class="sourceLineNo">056</span>  @Rule<a name="line.56"></a>
+<span class="sourceLineNo">057</span>  public TestName name = new TestName();<a name="line.57"></a>
+<span class="sourceLineNo">058</span><a name="line.58"></a>
+<span class="sourceLineNo">059</span>  private static final Logger LOG = LoggerFactory.getLogger(TestDisabledWAL.class);<a name="line.59"></a>
+<span class="sourceLineNo">060</span>  static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();<a name="line.60"></a>
+<span class="sourceLineNo">061</span>  private Table table;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>  private TableName tableName;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>  private byte[] fam = Bytes.toBytes("f1");<a name="line.63"></a>
+<span class="sourceLineNo">064</span><a name="line.64"></a>
+<span class="sourceLineNo">065</span>  @BeforeClass<a name="line.65"></a>
+<span class="sourceLineNo">066</span>  public static void beforeClass() throws Exception {<a name="line.66"></a>
+<span class="sourceLineNo">067</span>    Configuration conf = TEST_UTIL.getConfiguration();<a name="line.67"></a>
+<span class="sourceLineNo">068</span>    conf.setBoolean("hbase.regionserver.hlog.enabled", false);<a name="line.68"></a>
+<span class="sourceLineNo">069</span>    try {<a name="line.69"></a>
+<span class="sourceLineNo">070</span>      TEST_UTIL.startMiniCluster();<a name="line.70"></a>
+<span class="sourceLineNo">071</span>    } catch (RuntimeException | IOException e) {<a name="line.71"></a>
+<span class="sourceLineNo">072</span>      LOG.error("Master failed to start.", e);<a name="line.72"></a>
+<span class="sourceLineNo">073</span>      fail("Failed to start cluster. Reason being: " + e.getCause().getMessage());<a name="line.73"></a>
+<span class="sourceLineNo">074</span>    }<a name="line.74"></a>
+<span class="sourceLineNo">075</span>  }<a name="line.75"></a>
+<span class="sourceLineNo">076</span><a name="line.76"></a>
+<span class="sourceLineNo">077</span>  @AfterClass<a name="line.77"></a>
+<span class="sourceLineNo">078</span>  public static void afterClass() throws Exception {<a name="line.78"></a>
+<span class="sourceLineNo">079</span>    TEST_UTIL.shutdownMiniCluster();<a name="line.79"></a>
+<span class="sourceLineNo">080</span>  }<a name="line.80"></a>
+<span class="sourceLineNo">081</span><a name="line.81"></a>
+<span class="sourceLineNo">082</span>  @Before<a name="line.82"></a>
+<span class="sourceLineNo">083</span>  public void setup() throws Exception {<a name="line.83"></a>
+<span class="sourceLineNo">084</span>    tableName = TableName.valueOf(name.getMethodName().replaceAll("[^a-zA-Z0-9]", "_"));<a name="line.84"></a>
+<span class="sourceLineNo">085</span>    LOG.info("Creating table " + tableName);<a name="line.85"></a>
+<span class="sourceLineNo">086</span>    table = TEST_UTIL.createTable(tableName, fam);<a name="line.86"></a>
+<span class="sourceLineNo">087</span>  }<a name="line.87"></a>
+<span class="sourceLineNo">088</span><a name="line.88"></a>
+<span class="sourceLineNo">089</span>  @After<a name="line.89"></a>
+<span class="sourceLineNo">090</span>  public void cleanup() throws Exception {<a name="line.90"></a>
+<span class="sourceLineNo">091</span>    LOG.info("Deleting table " + tableName);<a name="line.91"></a>
+<span class="sourceLineNo">092</span>    TEST_UTIL.deleteTable(tableName);<a name="line.92"></a>
+<span class="sourceLineNo">093</span>  }<a name="line.93"></a>
+<span class="sourceLineNo">094</span><a name="line.94"></a>
+<span class="sourceLineNo">095</span>  @Test<a name="line.95"></a>
+<span class="sourceLineNo">096</span>  public void testDisabledWAL() throws Exception {<a name="line.96"></a>
+<span class="sourceLineNo">097</span>    LOG.info("Writing data to table " + tableName);<a name="line.97"></a>
+<span class="sourceLineNo">098</span>    Put p = new Put(Bytes.toBytes("row"));<a name="line.98"></a>
+<span class="sourceLineNo">099</span>    p.addColumn(fam, Bytes.toBytes("qual"), Bytes.toBytes("val"));<a name="line.99"></a>
+<span class="sourceLineNo">100</span>    table.put(p);<a name="line.100"></a>
+<span class="sourceLineNo">101</span><a name="line.101"></a>
+<span class="sourceLineNo">102</span>    LOG.info("Flushing table " + tableName);<a name="line.102"></a>
+<span class="sourceLineNo">103</span>    TEST_UTIL.flush(tableName);<a name="line.103"></a>
+<span class="sourceLineNo">104</span><a name="line.104"></a>
+<span class="sourceLineNo">105</span>    LOG.info("Getting data from table " + tableName);<a name="line.105"></a>
+<span class="sourceLineNo">106</span>    Get get = new Get(Bytes.toBytes("row"));<a name="line.106"></a>
+<span class="sourceLineNo">107</span><a name="line.107"></a>
+<span class="sourceLineNo">108</span>    Result result = table.get(get);<a name="line.108"></a>
+<span class="sourceLineNo">109</span>    assertNotNull(result.getValue(fam, Bytes.toBytes("qual")));<a name="line.109"></a>
+<span class="sourceLineNo">110</span>  }<a name="line.110"></a>
+<span class="sourceLineNo">111</span>}<a name="line.111"></a>
 
 
 


[26/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/apache_hbase_reference_guide.pdf
----------------------------------------------------------------------
diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf
index 14ebd1a..7ac23b6 100644
--- a/apache_hbase_reference_guide.pdf
+++ b/apache_hbase_reference_guide.pdf
@@ -5,16 +5,16 @@
 /Author (Apache HBase Team)
 /Creator (Asciidoctor PDF 1.5.0.alpha.15, based on Prawn 2.2.2)
 /Producer (Apache HBase Team)
-/ModDate (D:20180419144425+00'00')
-/CreationDate (D:20180419144425+00'00')
+/ModDate (D:20180420144439+00'00')
+/CreationDate (D:20180420144439+00'00')
 >>
 endobj
 2 0 obj
 << /Type /Catalog
 /Pages 3 0 R
 /Names 26 0 R
-/Outlines 4616 0 R
-/PageLabels 4842 0 R
+/Outlines 4634 0 R
+/PageLabels 4860 0 R
 /PageMode /UseOutlines
 /OpenAction [7 0 R /FitH 842.89]
 /ViewerPreferences << /DisplayDocTitle true
@@ -23,8 +23,8 @@ endobj
 endobj
 3 0 obj
 << /Type /Pages
-/Count 721
-/Kids [721 page object references, elided]
+/Count 723
+/Kids [723 page object references, shifted for the two new pages, elided]
 >>
 endobj
[The remaining ~1,600 lines of this message are mechanical fallout from
regenerating the PDF: object references to fonts, outlines, annotations and
stamps shift by 18 (e.g. /Outlines 4616 0 R -> 4634 0 R; /Annots arrays
4166..4613 -> 4184..4631), and several hundred BT/ET text hunks advance
table-of-contents page numbers by two (e.g. <323735> -> <323737>, hex for
"275" -> "277") to account for the two pages added to the reference guide.
The archived message is cut off mid-diff.]
 /F1.0 10 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 /Annots [105 0 R 106 0 R 107 0 R 108 0 R]
@@ -25104,7 +25104,7 @@ endobj
 /F4.0 35 0 R
 /F5.1 45 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 /Annots [112 0 R 113 0 R]
@@ -25384,7 +25384,7 @@ endobj
 /F1.0 10 0 R
 /F4.0 35 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 >>
@@ -26864,7 +26864,7 @@ endobj
 /F5.1 45 0 R
 /F4.0 35 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 /Annots [120 0 R 121 0 R 122 0 R 123 0 R 124 0 R 125 0 R 126 0 R 128 0 R 129 0 R 130 0 R 131 0 R]
@@ -27904,7 +27904,7 @@ endobj
 /F2.0 29 0 R
 /F4.0 35 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 /Annots [134 0 R 136 0 R]
@@ -28657,7 +28657,7 @@ endobj
 /F2.0 29 0 R
 /F5.1 45 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 /Annots [139 0 R 141 0 R 142 0 R 143 0 R 144 0 R]
@@ -31216,7 +31216,7 @@ endobj
 /F2.0 29 0 R
 /F5.1 45 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 /Annots [147 0 R 148 0 R 149 0 R 150 0 R 151 0 R 152 0 R 153 0 R]
@@ -32017,7 +32017,7 @@ endobj
 /F2.0 29 0 R
 /F4.0 35 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 /Annots [159 0 R 160 0 R]
@@ -32028,12 +32028,12 @@ endobj
 endobj
 157 0 obj
 << /Limits [(__anchor-top) (adding.new.node)]
-/Names [(__anchor-top) 25 0 R (__indexterm-7552678) 3527 0 R (__indexterm-7555082) 3529 0 R (__indexterm-7556630) 3531 0 R (__indexterm-7559150) 3534 0 R (acid) 932 0 R (acl) 3338 0 R (add-metric-name-and-function-to-hadoop-compat-interface) 3627 0 R (add-the-implementation-to-both-hadoop-1-and-hadoop-2-compat-modules) 3629 0 R (add.metrics) 3625 0 R (adding-a-new-chapter-to-the-hbase-reference-guide) 3868 0 R (adding.new.node) 3084 0 R]
+/Names [(__anchor-top) 25 0 R (__indexterm-7613324) 3545 0 R (__indexterm-7615728) 3547 0 R (__indexterm-7617276) 3549 0 R (__indexterm-7619796) 3552 0 R (acid) 936 0 R (acl) 3356 0 R (add-metric-name-and-function-to-hadoop-compat-interface) 3645 0 R (add-the-implementation-to-both-hadoop-1-and-hadoop-2-compat-modules) 3647 0 R (add.metrics) 3643 0 R (adding-a-new-chapter-to-the-hbase-reference-guide) 3887 0 R (adding.new.node) 3102 0 R]
 >>
 endobj
 158 0 obj
 << /Limits [(io.storefile.bloom.block.size) (jdk-version-requirements)]
-/Names [(io.storefile.bloom.block.size) 358 0 R (irbrc) 795 0 R (irc) 3369 0 R (isolate-system-tables) 3335 0 R (java) 119 0 R (java-2) 1918 0 R (java-3) 1923 0 R (java.client.config) 516 0 R (jdk-issues) 2899 0 R (jdk-version-requirements) 56 0 R]
+/Names [(io.storefile.bloom.block.size) 358 0 R (irbrc) 799 0 R (irc) 3387 0 R (isolate-system-tables) 3353 0 R (java) 119 0 R (java-2) 1936 0 R (java-3) 1941 0 R (java.client.config) 516 0 R (jdk-issues) 2917 0 R (jdk-version-requirements) 56 0 R]
 >>
 endobj
 159 0 obj
@@ -33172,7 +33172,7 @@ endobj
 /F4.0 35 0 R
 /F1.1 38 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 /Annots [165 0 R 166 0 R 168 0 R]
@@ -33972,7 +33972,7 @@ endobj
 /F5.1 45 0 R
 /F4.0 35 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 /Annots [173 0 R 174 0 R 175 0 R 177 0 R 178 0 R 180 0 R 181 0 R]
@@ -35444,7 +35444,7 @@ endobj
 /F1.0 10 0 R
 /F4.0 35 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 /Annots [184 0 R 185 0 R]
@@ -35869,7 +35869,7 @@ endobj
 /F4.0 35 0 R
 /F3.0 33 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 >>
@@ -36525,7 +36525,7 @@ endobj
 /F3.0 33 0 R
 /F4.0 35 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 /Annots [191 0 R]
@@ -37292,7 +37292,7 @@ endobj
 /F4.0 35 0 R
 /F1.1 38 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 /Annots [196 0 R]
@@ -37823,7 +37823,7 @@ endobj
 /F3.0 33 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 >>
@@ -37839,7 +37839,7 @@ endobj
 endobj
 206 0 obj
 << /Limits [(getshortmidpointkey-an-optimization-for-data-index-block) (handling-of-errors-during-log-splitting)]
-/Names [(getshortmidpointkey-an-optimization-for-data-index-block) 4049 0 R (getting.involved) 3358 0 R (getting_started) 48 0 R (git.best.practices) 3630 0 R (git.patch.flow) 3684 0 R (goals) 4143 0 R (guide-for-hbase-committers) 3658 0 R (guidelines-for-deploying-a-coprocessor) 2368 0 R (guidelines-for-reporting-effective-issues) 3376 0 R (hadoop) 140 0 R (hadoop.native.lib) 3965 0 R (hadoop.policy.file) 383 0 R (handling-of-errors-during-log-splitting) 1660 0 R]
+/Names [(getshortmidpointkey-an-optimization-for-data-index-block) 4068 0 R (getting.involved) 3376 0 R (getting_started) 48 0 R (git.best.practices) 3648 0 R (git.patch.flow) 3703 0 R (goals) 4162 0 R (guide-for-hbase-committers) 3677 0 R (guidelines-for-deploying-a-coprocessor) 2386 0 R (guidelines-for-reporting-effective-issues) 3394 0 R (hadoop) 140 0 R (hadoop.native.lib) 3984 0 R (hadoop.policy.file) 383 0 R (handling-of-errors-during-log-splitting) 1678 0 R]
 >>
 endobj
 207 0 obj
@@ -38369,7 +38369,7 @@ endobj
 /F3.0 33 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 >>
@@ -38891,7 +38891,7 @@ endobj
 /F1.0 10 0 R
 /F4.0 35 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 >>
@@ -39526,7 +39526,7 @@ endobj
 /F3.0 33 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 >>
@@ -40035,7 +40035,7 @@ endobj
 /F3.0 33 0 R
 /F4.0 35 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 >>
@@ -40566,7 +40566,7 @@ endobj
 /F3.0 33 0 R
 /F4.0 35 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 >>
@@ -41145,7 +41145,7 @@ endobj
 /F3.0 33 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 /Annots [252 0 R 253 0 R]
@@ -41712,7 +41712,7 @@ endobj
 /F1.0 10 0 R
 /F4.0 35 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 /Annots [261 0 R 262 0 R 264 0 R 265 0 R]
@@ -41776,7 +41776,7 @@ endobj
 endobj
 268 0 obj
 << /Limits [(hbase.table.lock.enable) (hbase.tmp.dir)]
-/Names [(hbase.table.lock.enable) 408 0 R (hbase.table.max.rowsize) 411 0 R (hbase.tags) 1351 0 R (hbase.tests) 3510 0 R (hbase.tests.categories) 3554 0 R (hbase.tests.cluster) 3556 0 R (hbase.tests.example.code) 3557 0 R (hbase.tests.rules) 3550 0 R (hbase.tests.sleeps) 3555 0 R (hbase.tests.writing) 3549 0 R (hbase.thrift.maxQueuedRequests) 415 0 R (hbase.thrift.maxWorkerThreads) 414 0 R (hbase.thrift.minWorkerThreads) 413 0 R (hbase.tmp.dir) 198 0 R]
+/Names [(hbase.table.lock.enable) 408 0 R (hbase.table.max.rowsize) 411 0 R (hbase.tags) 1355 0 R (hbase.tests) 3528 0 R (hbase.tests.categories) 3572 0 R (hbase.tests.cluster) 3574 0 R (hbase.tests.example.code) 3575 0 R (hbase.tests.rules) 3568 0 R (hbase.tests.sleeps) 3573 0 R (hbase.tests.writing) 3567 0 R (hbase.thrift.maxQueuedRequests) 415 0 R (hbase.thrift.maxWorkerThreads) 414 0 R (hbase.thrift.minWorkerThreads) 413 0 R (hbase.tmp.dir) 198 0 R]
 >>
 endobj
 269 0 obj
@@ -42332,7 +42332,7 @@ endobj
 /F1.0 10 0 R
 /F1.1 38 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 >>
@@ -42927,7 +42927,7 @@ endobj
 /F4.0 35 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 /Annots [285 0 R 286 0 R]
@@ -42947,7 +42947,7 @@ endobj
 endobj
 284 0 obj
 << /Limits [(hbase.cluster.distributed) (hbase.data.umask.enable)]
-/Names [(hbase.cluster.distributed) 200 0 R (hbase.column.max.version) 434 0 R (hbase.commit.msg.format) 3810 0 R (hbase.coordinated.state.manager.class) 477 0 R (hbase.coprocessor.abortonerror) 399 0 R (hbase.coprocessor.enabled) 392 0 R (hbase.coprocessor.master.classes) 398 0 R (hbase.coprocessor.region.classes) 397 0 R (hbase.coprocessor.user.enabled) 396 0 R (hbase.data.umask) 425 0 R (hbase.data.umask.enable) 424 0 R]
+/Names [(hbase.cluster.distributed) 200 0 R (hbase.column.max.version) 434 0 R (hbase.commit.msg.format) 3829 0 R (hbase.coordinated.state.manager.class) 477 0 R (hbase.coprocessor.abortonerror) 399 0 R (hbase.coprocessor.enabled) 392 0 R (hbase.coprocessor.master.classes) 398 0 R (hbase.coprocessor.region.classes) 397 0 R (hbase.coprocessor.user.enabled) 396 0 R (hbase.data.umask) 425 0 R (hbase.data.umask.enable) 424 0 R]
 >>
 endobj
 285 0 obj
@@ -43493,7 +43493,7 @@ endobj
 /F3.0 33 0 R
 /F4.0 35 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 >>
@@ -43984,7 +43984,7 @@ endobj
 /F3.0 33 0 R
 /F4.0 35 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 >>
@@ -44556,7 +44556,7 @@ endobj
 /F3.0 33 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 >>
@@ -45117,7 +45117,7 @@ endobj
 /F3.0 33 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 >>
@@ -45714,7 +45714,7 @@ endobj
 /F3.0 33 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 >>
@@ -46318,7 +46318,7 @@ endobj
 /F3.0 33 0 R
 /F4.0 35 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 >>
@@ -46861,7 +46861,7 @@ endobj
 /F1.0 10 0 R
 /F4.0 35 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 >>
@@ -47369,7 +47369,7 @@ endobj
 /F3.0 33 0 R
 /F4.0 35 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 >>
@@ -47944,7 +47944,7 @@ endobj
 /F3.0 33 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 /Annots [350 0 R 354 0 R]
@@ -48513,7 +48513,7 @@ endobj
 /F3.0 33 0 R
 /F4.0 35 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 >>
@@ -48523,7 +48523,7 @@ endobj
 endobj
 359 0 obj
 << /Limits [(quota) (regions.arch)]
-/Names [(quota) 3196 0 R (read-api-and-usage) 1919 0 R (read-hbase-shell-commands-from-a-command-file) 781 0 R (reading-filtering-and-sending-edits) 3176 0 R (reading_cells_with_labels) 1405 0 R (recommended.configurations.hdfs) 546 0 R (recommended_configurations) 541 0 R (recommended_configurations.zk) 542 0 R (region-overlap-repairs) 3923 0 R (region-replication-for-meta-table-s-region) 1896 0 R (regions.arch) 1692 0 R]
+/Names [(quota) 3214 0 R (read-api-and-usage) 1937 0 R (read-hbase-shell-commands-from-a-command-file) 785 0 R (reading-filtering-and-sending-edits) 3194 0 R (reading_cells_with_labels) 1409 0 R (recommended.configurations.hdfs) 546 0 R (recommended_configurations) 541 0 R (recommended_configurations.zk) 542 0 R (region-overlap-repairs) 3942 0 R (region-replication-for-meta-table-s-region) 1914 0 R (regions.arch) 1710 0 R]
 >>
 endobj
 360 0 obj
@@ -49124,7 +49124,7 @@ endobj
 /F4.0 35 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 /Annots [370 0 R 371 0 R 373 0 R 375 0 R 376 0 R]
@@ -49135,7 +49135,7 @@ endobj
 endobj
 368 0 obj
 << /Limits [(hbase.regionserver.thrift.compact) (hbase.rootdir.perms)]
-/Names [(hbase.regionserver.thrift.compact) 420 0 R (hbase.regionserver.thrift.framed) 416 0 R (hbase.regionserver.thrift.framed.max_frame_size_in_mb) 417 0 R (hbase.replication.management) 3146 0 R (hbase.replication.rpc.codec) 486 0 R (hbase.replication.source.maxthreads) 487 0 R (hbase.rest-csrf.browser-useragents-regex) 470 0 R (hbase.rest.csrf.enabled) 469 0 R (hbase.rest.filter.classes) 458 0 R (hbase.rest.port) 400 0 R (hbase.rest.readonly) 403 0 R (hbase.rest.support.proxyuser) 406 0 R (hbase.rest.threads.max) 404 0 R (hbase.rest.threads.min) 405 0 R (hbase.rolling.restart) 635 0 R (hbase.rolling.upgrade) 631 0 R (hbase.rootdir) 199 0 R (hbase.rootdir.perms) 421 0 R]
+/Names [(hbase.regionserver.thrift.compact) 420 0 R (hbase.regionserver.thrift.framed) 416 0 R (hbase.regionserver.thrift.framed.max_frame_size_in_mb) 417 0 R (hbase.replication.management) 3164 0 R (hbase.replication.rpc.codec) 486 0 R (hbase.replication.source.maxthreads) 487 0 R (hbase.rest-csrf.browser-useragents-regex) 470 0 R (hbase.rest.csrf.enabled) 469 0 R (hbase.rest.filter.classes) 458 0 R (hbase.rest.port) 400 0 R (hbase.rest.readonly) 403 0 R (hbase.rest.support.proxyuser) 406 0 R (hbase.rest.threads.max) 404 0 R (hbase.rest.threads.min) 405 0 R (hbase.rolling.restart) 635 0 R (hbase.rolling.upgrade) 631 0 R (hbase.rootdir) 199 0 R (hbase.rootdir.perms) 421 0 R]
 >>
 endobj
 369 0 obj
@@ -49715,7 +49715,7 @@ endobj
 /F3.0 33 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 >>
@@ -50257,7 +50257,7 @@ endobj
 /F3.0 33 0 R
 /F4.0 35 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 >>
@@ -50808,7 +50808,7 @@ endobj
 /F3.0 33 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 >>
@@ -51327,7 +51327,7 @@ endobj
 /F3.0 33 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 >>
@@ -51845,7 +51845,7 @@ endobj
 /F3.0 33 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 >>
@@ -51855,7 +51855,7 @@ endobj
 endobj
 412 0 obj
 << /Limits [(hbase.zookeeper.property.maxClientCnxns) (hfile.block.index.cacheonwrite)]
-/Names [(hbase.zookeeper.property.maxClientCnxns) 273 0 R (hbase.zookeeper.property.syncLimit) 267 0 R (hbase.zookeeper.quorum) 203 0 R (hbase_apis) 2122 0 R (hbase_default_configurations) 197 0 R (hbase_env) 530 0 R (hbase_metrics) 3089 0 R (hbase_mob) 1933 0 R (hbase_site) 526 0 R (hbase_supported_tested_definitions) 42 0 R (hbck) 2986 0 R (hbck.in.depth) 3915 0 R (health.check) 2982 0 R (hedged.reads) 2586 0 R (hfile) 1749 0 R (hfile-format) 1750 0 R (hfile-format-2) 4018 0 R (hfile.block.bloom.cacheonwrite) 355 0 R (hfile.block.cache.size) 344 0 R (hfile.block.index.cacheonwrite) 345 0 R]
+/Names [(hbase.zookeeper.property.maxClientCnxns) 273 0 R (hbase.zookeeper.property.syncLimit) 267 0 R (hbase.zookeeper.quorum) 203 0 R (hbase_apis) 2140 0 R (hbase_default_configurations) 197 0 R (hbase_env) 530 0 R (hbase_metrics) 3107 0 R (hbase_mob) 1951 0 R (hbase_site) 526 0 R (hbase_supported_tested_definitions) 42 0 R (hbck) 3005 0 R (hbck.in.depth) 3934 0 R (health.check) 3001 0 R (hedged.reads) 2604 0 R (hfile) 1767 0 R (hfile-format) 1768 0 R (hfile-format-2) 4037 0 R (hfile.block.bloom.cacheonwrite) 355 0 R (hfile.block.cache.size) 344 0 R (hfile.block.index.cacheonwrite) 345 0 R]
 >>
 endobj
 413 0 obj
@@ -52341,7 +52341,7 @@ endobj
 /F3.0 33 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 >>
@@ -52354,7 +52354,7 @@ endobj
 endobj
 422 0 obj
 << /Limits [(hbase.rpc) (hbase.secure.spnego.ui)]
-/Names [(hbase.rpc) 4136 0 R (hbase.rpc.rows.warning.threshold) 509 0 R (hbase.rpc.shortoperation.timeout) 367 0 R (hbase.rpc.timeout) 361 0 R (hbase.rs.cacheblocksonwrite) 360 0 R (hbase.secure.bulkload) 1431 0 R (hbase.secure.configuration) 1271 0 R (hbase.secure.enable) 1435 0 R (hbase.secure.simpleconfiguration) 1313 0 R (hbase.secure.spnego.ui) 1266 0 R]
+/Names [(hbase.rpc) 4155 0 R (hbase.rpc.rows.warning.threshold) 509 0 R (hbase.rpc.shortoperation.timeout) 367 0 R (hbase.rpc.timeout) 361 0 R (hbase.rs.cacheblocksonwrite) 360 0 R (hbase.secure.bulkload) 1435 0 R (hbase.secure.configuration) 1275 0 R (hbase.secure.enable) 1439 0 R (hbase.secure.simpleconfiguration) 1317 0 R (hbase.secure.spnego.ui) 1270 0 R]
 >>
 endobj
 423 0 obj
@@ -52875,7 +52875,7 @@ endobj
 /F3.0 33 0 R
 /F4.0 35 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 >>
@@ -53443,7 +53443,7 @@ endobj
 /F3.0 33 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 >>
@@ -53960,7 +53960,7 @@ endobj
 /F3.0 33 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 >>
@@ -54503,7 +54503,7 @@ endobj
 /F3.0 33 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 /Annots [461 0 R 462 0 R]
@@ -54531,7 +54531,7 @@ endobj
 endobj
 460 0 obj
 << /Limits [(hbase.limitetprivate.api) (hbase.master.logcleaner.plugins)]
-/Names [(hbase.limitetprivate.api) 626 0 R (hbase.local.dir) 205 0 R (hbase.mapreduce.classpath) 1168 0 R (hbase.master.balancer.maxRitPercent) 295 0 R (hbase.master.fileSplitTimeout) 217 0 R (hbase.master.hfilecleaner.plugins) 215 0 R (hbase.master.info.bindAddress) 209 0 R (hbase.master.info.port) 208 0 R (hbase.master.infoserver.redirect) 216 0 R (hbase.master.kerberos.principal) 378 0 R (hbase.master.keytab.file) 377 0 R (hbase.master.loadbalance.bytable) 463 0 R (hbase.master.loadbalancer.class) 459 0 R (hbase.master.logcleaner.plugins) 212 0 R]
+/Names [(hbase.limitetprivate.api) 626 0 R (hbase.local.dir) 205 0 R (hbase.mapreduce.classpath) 1172 0 R (hbase.master.balancer.maxRitPercent) 295 0 R (hbase.master.fileSplitTimeout) 217 0 R (hbase.master.hfilecleaner.plugins) 215 0 R (hbase.master.info.bindAddress) 209 0 R (hbase.master.info.port) 208 0 R (hbase.master.infoserver.redirect) 216 0 R (hbase.master.kerberos.principal) 378 0 R (hbase.master.keytab.file) 377 0 R (hbase.master.loadbalance.bytable) 463 0 R (hbase.master.loadbalancer.class) 459 0 R (hbase.master.logcleaner.plugins) 212 0 R]
 >>
 endobj
 461 0 obj
@@ -55150,7 +55150,7 @@ endobj
 /F4.0 35 0 R
 /F6.0 471 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 /Annots [467 0 R 468 0 R]
@@ -55191,11 +55191,11 @@ endobj
 << /Type /Font
 /BaseFont /066905+mplus1mn-bold
 /Subtype /TrueType
-/FontDescriptor 4868 0 R
+/FontDescriptor 4886 0 R
 /FirstChar 32
 /LastChar 255
-/Widths 4870 0 R
-/ToUnicode 4869 0 R
+/Widths 4888 0 R
+/ToUnicode 4887 0 R
 >>
 endobj
 472 0 obj
@@ -55761,7 +55761,7 @@ endobj
 /F3.0 33 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 >>
@@ -55774,7 +55774,7 @@ endobj
 endobj
 478 0 obj
 << /Limits [(hbase.defaults.for.version.skip) (hbase.hregion.percolumnfamilyflush.size.lower.bound.min)]
-/Names [(hbase.defaults.for.version.skip) 407 0 R (hbase.dfs.client.read.shortcircuit.buffer.size) 439 0 R (hbase.display.keys) 391 0 R (hbase.dynamic.jars.dir) 456 0 R (hbase.encryption.server) 1418 0 R (hbase.env.sh) 511 0 R (hbase.fix.version.in.jira) 3798 0 R (hbase.history) 4096 0 R (hbase.hregion.majorcompaction) 314 0 R (hbase.hregion.majorcompaction.jitter) 315 0 R (hbase.hregion.max.filesize) 313 0 R (hbase.hregion.memstore.block.multiplier) 309 0 R (hbase.hregion.memstore.flush.size) 306 0 R (hbase.hregion.memstore.mslab.enabled) 310 0 R (hbase.hregion.percolumnfamilyflush.size.lower.bound.min) 307 0 R]
+/Names [(hbase.defaults.for.version.skip) 407 0 R (hbase.dfs.client.read.shortcircuit.buffer.size) 439 0 R (hbase.display.keys) 391 0 R (hbase.dynamic.jars.dir) 456 0 R (hbase.encryption.server) 1422 0 R (hbase.env.sh) 511 0 R (hbase.fix.version.in.jira) 3817 0 R (hbase.history) 4115 0 R (hbase.hregion.majorcompaction) 314 0 R (hbase.hregion.majorcompaction.jitter) 315 0 R (hbase.hregion.max.filesize) 313 0 R (hbase.hregion.memstore.block.multiplier) 309 0 R (hbase.hregion.memstore.flush.size) 306 0 R (hbase.hregion.memstore.mslab.enabled) 310 0 R (hbase.hregion.percolumnfamilyflush.size.lower.bound.min) 307 0 R]
 >>
 endobj
 479 0 obj
@@ -56313,7 +56313,7 @@ endobj
 /F3.0 33 0 R
 /F4.0 35 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 >>
@@ -56828,7 +56828,7 @@ endobj
 /F3.0 33 0 R
 /F4.0 35 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 >>
@@ -57339,7 +57339,7 @@ endobj
 /F3.0 33 0 R
 /F4.0 35 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 >>
@@ -57355,7 +57355,7 @@ endobj
 endobj
 501 0 obj
 << /Limits [(hbase.mob.file.cache.size) (hbase.org.site.contributing)]
-/Names [(hbase.mob.file.cache.size) 492 0 R (hbase.moduletest.run) 3519 0 R (hbase.moduletest.shell) 3514 0 R (hbase.moduletests) 3512 0 R (hbase.normalizer.min.region.count) 300 0 R (hbase.normalizer.period) 299 0 R (hbase.offpeak.end.hour) 335 0 R (hbase.offpeak.start.hour) 334 0 R (hbase.org) 3501 0 R (hbase.org.site.contributing) 3503 0 R]
+/Names [(hbase.mob.file.cache.size) 492 0 R (hbase.moduletest.run) 3537 0 R (hbase.moduletest.shell) 3532 0 R (hbase.moduletests) 3530 0 R (hbase.normalizer.min.region.count) 300 0 R (hbase.normalizer.period) 299 0 R (hbase.offpeak.end.hour) 335 0 R (hbase.offpeak.start.hour) 334 0 R (hbase.org) 3519 0 R (hbase.org.site.contributing) 3521 0 R]
 >>
 endobj
 502 0 obj
@@ -57870,7 +57870,7 @@ endobj
 /F1.0 10 0 R
 /F2.0 29 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 >>
@@ -58981,7 +58981,7 @@ endobj
 /F3.0 33 0 R
 /F4.0 35 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 /Annots [517 0 R]
@@ -59829,7 +59829,7 @@ endobj
 /F4.0 35 0 R
 /F2.0 29 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 /Annots [520 0 R]
@@ -61286,7 +61286,7 @@ endobj
 /F3.0 33 0 R
 /F1.1 38 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 >>
@@ -61810,7 +61810,7 @@ endobj
 /F1.0 10 0 R
 /F4.0 35 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 >>
@@ -62463,7 +62463,7 @@ endobj
 /F4.0 35 0 R
 /F1.1 38 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 /Annots [535 0 R 536 0 R 538 0 R 539 0 R 540 0 R 545 0 R]
@@ -63218,7 +63218,7 @@ endobj
 /F2.0 29 0 R
 /F1.1 38 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 /Annots [551 0 R 554 0 R 557 0 R]
@@ -63257,7 +63257,7 @@ endobj
 endobj
 556 0 obj
 << /Limits [(configuration) (coprocessor-implementation-overview)]
-/Names [(configuration) 104 0 R (configuration-2) 3118 0 R (configuration-3) 3886 0 R (configuration-files) 111 0 R (configuration-from-scratch) 3783 0 R (configuration-properties) 1901 0 R (configure-mob-compaction-mergeable-threshold) 1942 0 R (configure-mob-compaction-policy) 1941 0 R (configuring-columns-for-mob) 1938 0 R (configuring-server-wide-behavior-of-bloom-filters) 2492 0 R (configuring-the-rest-server-and-client) 2141 0 R (confirm) 190 0 R (connection-setup) 4148 0 R (constraints) 1070 0 R (contributing-to-documentation-or-other-strings) 3821 0 R (coprocessor-implementation-overview) 2297 0 R]
+/Names [(configuration) 104 0 R (configuration-2) 3136 0 R (configuration-3) 3905 0 R (configuration-files) 111 0 R (configuration-from-scratch) 3802 0 R (configuration-properties) 1919 0 R (configure-mob-compaction-mergeable-threshold) 1960 0 R (configure-mob-compaction-policy) 1959 0 R (configuring-columns-for-mob) 1956 0 R (configuring-server-wide-behavior-of-bloom-filters) 2510 0 R (configuring-the-rest-server-and-client) 2159 0 R (confirm) 190 0 R (connection-setup) 4167 0 R (constraints) 1074 0 R (contributing-to-documentation-or-other-strings) 3840 0 R (coprocessor-implementation-overview) 2315 0 R]
 >>
 endobj
 557 0 obj
@@ -64119,7 +64119,7 @@ endobj
 /F4.0 35 0 R
 /F5.1 45 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 /Annots [561 0 R]
@@ -64933,7 +64933,7 @@ endobj
 /F5.1 45 0 R
 /F3.0 33 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 /Annots [565 0 R 566 0 R 567 0 R 571 0 R 574 0 R 575 0 R 576 0 R]
@@ -65675,7 +65675,7 @@ endobj
 /F1.1 38 0 R
 /F4.0 35 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 /Annots [580 0 R 581 0 R]
@@ -67107,7 +67107,7 @@ endobj
 /F1.0 10 0 R
 /F3.0 33 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 /Annots [585 0 R 586 0 R]
@@ -67884,7 +67884,7 @@ endobj
 /F1.0 10 0 R
 /F3.0 33 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 >>
@@ -68585,7 +68585,7 @@ endobj
 /F3.0 33 0 R
 /F5.1 45 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 >>
@@ -69916,7 +69916,7 @@ endobj
 /F4.0 35 0 R
 /F3.0 33 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 >>
@@ -71411,7 +71411,7 @@ endobj
 /Font << /F2.0 29 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 >>
@@ -71577,7 +71577,7 @@ endobj
 /Font << /F2.0 29 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 >>
@@ -71775,7 +71775,7 @@ endobj
 /Font << /F2.0 29 0 R
 /F1.0 10 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 /Annots [601 0 R 602 0 R 603 0 R 604 0 R 605 0 R]
@@ -72529,7 +72529,7 @@ endobj
 /F1.0 10 0 R
 /F3.0 33 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 /Annots [610 0 R]
@@ -73472,7 +73472,7 @@ endobj
 /F3.0 33 0 R
 /F4.0 35 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 /Annots [614 0 R 615 0 R 616 0 R]
@@ -75860,7 +75860,7 @@ endobj
 /F1.1 38 0 R
 /F4.0 35 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 /Annots [620 0 R 621 0 R]
@@ -76745,7 +76745,7 @@ endobj
 /F2.0 29 0 R
 /F3.0 33 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 /Annots [624 0 R]
@@ -77219,7 +77219,7 @@ endobj
 /F4.0 35 0 R
 /F3.0 33 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 /Annots [632 0 R 633 0 R 634 0 R 636 0 R 637 0 R 638 0 R]
@@ -78011,7 +78011,7 @@ endobj
 /F1.0 10 0 R
 /F3.0 33 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 /Annots [646 0 R]
@@ -78022,17 +78022,17 @@ endobj
 endobj
 642 0 obj
 << /Limits [(standalone.over.hdfs) (storefile-refresher)]
-/Names [(standalone.over.hdfs) 169 0 R (standalone_dist) 164 0 R (starting-and-stopping-the-rest-server) 2140 0 R (static-loading) 2331 0 R (static-unloading) 2334 0 R (store) 1743 0 R (store-file-ttl) 1895 0 R (store.file.dir) 1758 0 R (store.memstore) 1744 0 R (storefile-refresher) 1891 0 R]
+/Names [(standalone.over.hdfs) 169 0 R (standalone_dist) 164 0 R (starting-and-stopping-the-rest-server) 2158 0 R (static-loading) 2349 0 R (static-unloading) 2352 0 R (store) 1761 0 R (store-file-ttl) 1913 0 R (store.file.dir) 1776 0 R (store.memstore) 1762 0 R (storefile-refresher) 1909 0 R]
 >>
 endobj
 643 0 obj
 << /Limits [(__anchor-top) (cascading)]
-/Kids [157 0 R 3628 0 R 1860 0 R 1455 0 R 3863 0 R 1617 0 R 4044 0 R 2090 0 R 2048 0 R 2005 0 R 2039 0 R 3439 0 R]
+/Kids [157 0 R 3646 0 R 1878 0 R 1459 0 R 3882 0 R 1622 0 R 4063 0 R 2108 0 R 2066 0 R 2023 0 R 2057 0 R 3457 0 R]
 >>
 endobj
 644 0 obj
 << /Limits [(hbase.mob.file.cache.size) (hbase.zookeeper.property.initLimit)]
-/Kids [501 0 R 3991 0 R 441 0 R 239 0 R 368 0 R 422 0 R 4137 0 R 455 0 R 268 0 R 3551 0 R 3522 0 R]
+/Kids [501 0 R 4010 0 R 441 0 R 239 0 R 368 0 R 422 0 R 4156 0 R 455 0 R 268 0 R 3569 0 R 3540 0 R]
 >>
 endobj
 645 0 obj
@@ -78696,7 +78696,7 @@ endobj
 /F1.0 10 0 R
 /F3.0 33 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 >>
@@ -79411,7 +79411,7 @@ endobj
 /F1.0 10 0 R
 /F2.0 29 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 /Annots [654 0 R]
@@ -79878,7 +79878,7 @@ endobj
 /F3.0 33 0 R
 /F4.0 35 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 >>
@@ -80598,7 +80598,7 @@ endobj
 /F1.0 10 0 R
 /F3.0 33 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 /Annots [663 0 R 666 0 R]
@@ -81841,7 +81841,7 @@ endobj
 /F3.0 33 0 R
 /F2.0 29 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 /Annots [669 0 R 670 0 R 671 0 R 672 0 R 673 0 R 674 0 R 675 0 R 676 0 R 679 0 R 680 0 R]
@@ -82781,7 +82781,7 @@ endobj
 /Font << /F1.0 10 0 R
 /F3.0 33 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 /Annots [683 0 R 686 0 R 689 0 R]
@@ -82817,7 +82817,7 @@ endobj
 endobj
 688 0 obj
 << /Limits [(upgrade2.0.process) (upgrade2.0.tracing)]
-/Names [(upgrade2.0.process) 749 0 R (upgrade2.0.public.api) 729 0 R (upgrade2.0.regions.on.master) 684 0 R (upgrade2.0.removed.configs) 665 0 R (upgrade2.0.renamed.configs) 677 0 R (upgrade2.0.replication) 708 0 R (upgrade2.0.rolling.upgrades) 748 0 R (upgrade2.0.shaded.client.preferred) 722 0 R (upgrade2.0.shell) 712 0 R (upgrade2.0.tracing) 730 0 R]
+/Names [(upgrade2.0.process) 755 0 R (upgrade2.0.public.api) 729 0 R (upgrade2.0.regions.on.master) 684 0 R (upgrade2.0.removed.configs) 665 0 R (upgrade2.0.renamed.configs) 677 0 R (upgrade2.0.replication) 708 0 R (upgrade2.0.rolling.upgrades) 752 0 R (upgrade2.0.shaded.client.preferred) 722 0 R (upgrade2.0.shell) 712 0 R (upgrade2.0.tracing) 730 0 R]
 >>
 endobj
 689 0 obj
@@ -83696,7 +83696,7 @@ endobj
 /F3.0 33 0 R
 /F4.0 35 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 /Annots [693 0 R 695 0 R 696 0 R 697 0 R]
@@ -84423,7 +84423,7 @@ endobj
 /Font << /F1.0 10 0 R
 /F3.0 33 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 /Annots [702 0 R 705 0 R 706 0 R]
@@ -85475,7 +85475,7 @@ endobj
 /F3.0 33 0 R
 /F4.0 35 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
 /Annots [711 0 R 713 0 R 715 0 R 718 0 R]
@@ -86247,7 +86247,7 @@ endobj
 /F3.0 33 0 R
 /F2.0 29 0 R
 >>
-/XObject << /Stamp2 4615 0 R
+/XObject << /Stamp2 4633 0 R
 >>
 >>
 /Annots [726 0 R 727 0 R 728 0 R]
@@ -86264,7 +86264,7 @@ endobj
 endobj
 725 0 obj
 << /Limits [(upgrade2.0.distributed.log.replay) (upgrade2.0.prefix-tree.removed)]
-/Names [(upgrade2.0.distributed.log.replay) 685 0 R (upgrade2.0.filters) 719 0 R (upgrade2.0.hbck) 664 0 R (upgrade2.0.hfile3.only) 716 0 R (upgrade2.0.logging) 694 0 R (upgrade2.0.mapreduce.module) 723 0 R (upgrade2.0.memory) 704 0 R (upgrade2.0.metrics) 692 0 R (upgrade2.0.pb.wal.only) 717 0 R (upgrade2.0.permissions) 701 0 R (upgrade2.0.prefix-tree.removed) 687 0 R]
+/Names [(upgrade2.0.distributed.log.replay) 685 0 R (upgrade2.0.filters) 719 0 R (upgrade2.0.hbck) 664 0 R (upgrade2.0.hfile3.only) 716 0 R (upgrade2.0.logging) 694 0 R (upgrade2.0.mapreduce.module) 723 0 R (upgrade2.0.memory) 704 0 R (upgrade2.0.metrics) 692 0 R (upgrade2.0.pb.wal.only) 717 0 R (upgrade2.0.perf) 733 0 R (upgrade2.0.permissions) 701 0 R (upgrade2.0.prefix-tree.removed) 687 0 R]
 >>
 endobj
 726 0 obj
@@ -86298,7 +86298,7 @@ endobj
 [721 0 R /XYZ 0 173.276 null]
 endobj
 731 0 obj
-<< /Length 13819
+<< /Length 15232
 >>
 stream
 q
@@ -86334,7 +86334,141 @@ ET
 0.2 0.2 0.2 SCN
 
 BT
-48.24 743.646 Td
+48.24 750.9642 Td
+/F3.0 9.975 Tf
+<506572666f726d616e6365> Tj
+ET
+
+0.0 0.0 0.0 SCN
+0.0 0.0 0.0 scn
+0.2 0.2 0.2 scn
+0.2 0.2 0.2 SCN
+
+1.1908 Tw
+
+BT
+48.24 731.375 Td
+/F1.0 10.5 Tf
+[<59> 69.8242 <6f752077696c6c206c696b> 20.0195 <656c79207365652061206368616e676520696e2074686520706572666f726d616e63652070726f66696c65206f6e2075706772> 20.0195 <61646520746f2068626173652d322e302e3020676976656e207265616420616e64>] TJ
+ET
+
+
+0.0 Tw
+0.0 0.0 0.0 SCN
+0.0 0.0 0.0 scn
+0.2 0.2 0.2 scn
+0.2 0.2 0.2 SCN
+
+0.4461 Tw
+
+BT
+48.24 715.595 Td
+/F1.0 10.5 Tf
+[<7772697465207061746873206861766520756e646572676f6e65207369676e69666963616e74206368616e67652e204f6e2072656c656173652c20777269746573206d61> 20.0195 <7920626520736c6f77657220776974682072656164732061626f7574>] TJ
+ET
+
+
+0.0 Tw
+0.0 0.0 0.0 SCN
+0.0 0.0 0.0 scn
+0.2 0.2 0.2 scn
+0.2 0.2 0.2 SCN
+
+0.8202 Tw
+
+BT
+48.24 699.815 Td
+/F1.0 10.5 Tf
+<7468652073616d65206f72206d756368206265747465722c20646570656e64656e74206f6e20636f6e746578742e20426520707265706172656420746f207370656e642074696d652072652d74756e696e67202853656520> Tj
+ET
+
+
+0.0 Tw
+0.0 0.0 0.0 SCN
+0.0 0.0 0.0 scn
+0.2588 0.5451 0.7922 scn
+0.2588 0.5451 0.7922 SCN
+
+0.8202 Tw
+
+BT
+509.8595 699.815 Td
+/F1.0 10.5 Tf
+<417061636865> Tj
+ET
+
+
+0.0 Tw
+0.0 0.0 0.0 SCN
+0.0 0.0 0.0 scn
+0.2588 0.5451 0.7922 scn
+0.2588 0.5451 0.7922 SCN
+
+0.9483 Tw
+
+BT
+48.24 684.035 Td
+/F1.0 10.5 Tf
+<484261736520506572666f726d616e63652054756e696e67> Tj
+ET
+
+
+0.0 Tw
+0.0 0.0 0.0 SCN
+0.0 0.0 0.0 scn
+0.2 0.2 0.2 scn
+0.2 0.2 0.2 SCN
+
+0.9483 Tw
+
+BT
+187.7812 684.035 Td
+/F1.0 10.5 Tf
+<292e20506572666f726d616e636520697320616c736f20616e20617265612074686174206973206e6f7720756e646572206163746976652072657669657720736f206c6f6f6b> Tj
+ET
+
+
+0.0 Tw
+0.0 0.0 0.0 SCN
+0.0 0.0 0.0 scn
+0.2 0.2 0.2 scn
+0.2 0.2 0.2 SCN
+
+BT
+48.24 668.255 Td
+/F1.0 10.5 Tf
+<666f727761726420746f20696d70726f76656d656e7420696e20636f6d696e672072656c6561736573202853656520> Tj
+ET
+
+0.0 0.0 0.0 SCN
+0.0 0.0 0.0 scn
+0.2588 0.5451 0.7922 scn
+0.2588 0.5451 0.7922 SCN
+
+BT
+294.654 668.255 Td
+/F1.0 10.5 Tf
+[<4842> 20.0195 <4153452d323031383820544553> 20.0195 <54494e4720506572666f726d616e6365>] TJ
+ET
+
+0.0 0.0 0.0 SCN
+0.0 0.0 0.0 scn
+0.2 0.2 0.2 scn
+0.2 0.2 0.2 SCN
+
+BT
+476.6921 668.255 Td
+/F1.0 10.5 Tf
+<292e> Tj
+ET
+
+0.0 0.0 0.0 SCN
+0.0 0.0 0.0 scn
+0.2 0.2 0.2 scn
+0.2 0.2 0.2 SCN
+
+BT
+48.24 633.755 Td
 /F2.0 13 Tf
 [<31332e312e322e2055706772> 20.0195 <6164696e6720436f70726f636573736f727320746f20322e30>] TJ
 ET
@@ -86347,7 +86481,7 @@ ET
 2.435 Tw
 
 BT
-48.24 717.086 Td
+48.24 607.195 Td
 /F1.0 10.5 Tf
 [<436f70726f636573736f72732068617665206368616e676564207375627374616e7469616c6c7920696e20322e302072> 20.0195 <616e67696e672066726f6d20746f70206c6576656c2064657369676e206368616e67657320696e20636c617373>] TJ
 ET
@@ -86362,7 +86496,7 @@ ET
 0.1353 Tw
 
 BT
-48.24 701.306 Td
+48.24 591.415 Td
 /F1.0 10.5 Tf
 [<68696572> 20.0195 <6172636869657320746f206368616e6765642f72656d6f766564206d6574686f64732c20696e74657266616365732c206574632e2028506172656e74206a6972> 20.0195 <613a20>] TJ
 ET
@@ -86377,7 +86511,7 @@ ET
 0.1353 Tw
 
 BT
-399.6332 701.306 Td
+399.6332 591.415 Td
 /F1.0 10.5 Tf
 [<4842> 20.0195 <4153452d313831363920436f70726f636573736f7220666978>] TJ
 ET
@@ -86390,7 +86524,7 @@ ET
 0.2588 0.5451 0.7922 SCN
 
 BT
-48.24 685.526 Td
+48.24 575.635 Td
 /F1.0 10.5 Tf
 <616e6420636c65616e7570206265666f726520322e302e302072656c65617365> Tj
 ET
@@ -86401,7 +86535,7 @@ ET
 0.2 0.2 0.2 SCN
 
 BT
-209.1315 685.526 Td
+209.1315 575.635 Td
 /F1.0 10.5 Tf
 <292e20536f6d65206f662074686520726561736f6e7320666f7220737563682077696465737072656164206368616e6765733a> Tj
 ET
@@ -86414,7 +86548,7 @@ ET
 0.2 0.2 0.2 SCN
 
 BT
-52.6765 657.746 Td
+52.6765 547.855 Td
 /F1.0 10.5 Tf
 <312e> Tj
 ET
@@ -86429,7 +86563,7 @@ ET
 2.0347 Tw
 
 BT
-66.24 657.746 Td
+66.24 547.855 Td
 /F1.0 10.5 Tf
 [<5061737320496e746572666163657320696e7374656164206f6620496d706c656d656e746174696f6e733b20652e672e2054> 29.7852 <61626c6544657363726970746f7220696e7374656164206f66204854> 29.7852 <61626c6544657363726970746f72>] TJ
 ET
@@ -86444,7 +86578,7 @@ ET
 1.1185 Tw
 
 BT
-66.24 641.966 Td
+66.24 532.075 Td
 /F1.0 10.5 Tf
 <616e6420526567696f6e20696e7374656164206f662048526567696f6e2028> Tj
 ET
@@ -86459,7 +86593,7 @@ ET
 1.1185 Tw
 
 BT
-231.5796 641.966 Td
+231.5796 532.075 Td
 /F1.0 10.5 Tf
 [<4842> 20.0195 <4153452d3138323431>] TJ
 ET
@@ -86474,7 +86608,7 @@ ET
 1.1185 Tw
 
 BT
-298.7899 641.966 Td
+298.7899 532.075 Td
 /F1.0 10.5 Tf
 [<204368616e676520636c69656e742e54> 29.7852 <61626c6520616e6420636c69656e742e41> 20.0195 <646d696e20746f206e6f7420757365>] TJ
 ET
@@ -86487,7 +86621,7 @@ ET
 0.2 0.2 0.2 SCN
 
 BT
-66.24 626.186 Td
+66.24 516.295 Td
 /F1.0 10.5 Tf
 [<4854> 29.7852 <61626c6544657363726970746f72292e>] TJ
 ET
@@ -86500,7 +86634,7 @@ ET
 0.2 0.2 0.2 SCN
 
 BT
-52.6765 604.406 Td
+52.6765 494.515 Td
 /F1.0 10.5 Tf
 <322e> Tj
 ET
@@ -86515,7 +86649,7 @@ ET
 0.0581 Tw
 
 BT
-66.24 604.406 Td
+66.24 494.515 Td
 /F1.0 10.5 Tf
 <44657369676e207265666163746f7220736f20696d706c656d656e74657273206e65656420746f2066696c6c206f7574206c65737320626f696c6572706c61746520616e6420736f2077652063616e20646f206d6f726520636f6d70696c652d> Tj
 ET
@@ -86528,7 +86662,7 @@ ET
 0.2 0.2 0.2 SCN
 
 BT
-66.24 588.626 Td
+66.24 478.735 Td
 /F1.0 10.5 Tf
 <74696d6520636865636b696e672028> Tj
 ET
@@ -86539,7 +86673,7 @@ ET
 0.2588 0.5451 0.7922 SCN
 
 BT
-142.3965 588.626 Td
+142.3965 478.735 Td
 /F1.0 10.5 Tf
 [<4842> 20.0195 <4153452d3137373332>] TJ
 ET
@@ -86550,7 +86684,7 @@ ET
 0.2 0.2 0.2 SCN
 
 BT
-209.6068 588.626 Td
+209.6068 478.735 Td
 /F1.0 10.5 Tf
 <29> Tj
 ET
@@ -86563,7 +86697,7 @@ ET
 0.2 0.2 0.2 SCN
 
 BT
-52.6765 566.846 Td
+52.6765 456.955 Td
 /F1.0 10.5 Tf
 <332e> Tj
 ET
@@ -86576,7 +86710,7 @@ ET
 0.2 0.2 0.2 SCN
 
 BT
-66.24 566.846 Td
+66.24 456.955 Td
 /F1.0 10.5 Tf
 <50757267652050726f746f636f6c20427566666572732066726f6d20436f70726f636573736f72204150492028> Tj
 ET
@@ -86587,7 +86721,7 @@ ET
 0.2588 0.5451 0.7922 SCN
 
 BT
-297.1245 566.846 Td
+297.1245 456.955 Td
 /F1.0 10.5 Tf
 [<4842> 20.0195 <4153452d3138383539>] TJ
 ET
@@ -86598,7 +86732,7 @@ ET
 0.2 0.2 0.2 SCN
 
 BT
-364.3348 566.846 Td
+364.3348 456.955 Td
 /F1.0 10.5 Tf
 <2c20> Tj
 ET
@@ -86609,7 +86743,7 @@ ET
 0.2588 0.5451 0.7922 SCN
 
 BT
-369.6793 566.846 Td
+369.6793 456.955 Td
 /F1.0 10.5 Tf
 [<4842> 20.0195 <4153452d3136373639>] TJ
 ET
@@ -86620,7 +86754,7 @@ ET
 0.2 0.2 0.2 SCN
 
 BT
-436.8896 566.846 Td
+436.8896 456.955 Td
 /F1.0 10.5 Tf
 <2c2065746329> Tj
 ET
@@ -86633,7 +86767,7 @@ ET
 0.2 0.2 0.2 SCN
 
 BT
-52.6765 545.066 Td
+52.6765 435.175 Td
 /F1.0 10.5 Tf
 <342e> Tj
 ET
@@ -86648,7 +86782,7 @@ ET
 0.2446 Tw
 
 BT
-66.24 545.066 Td
+66.24 435.175 Td
 /F1.0 10.5 Tf
 <437574206261636b206f6e2077686174207765206578706f736520746f20436f70726f636573736f72732072656d6f76696e6720686f6f6b73206f6e20696e7465726e616c732074686174207765726520746f6f2070726976617465> Tj
 ET
@@ -86663,7 +86797,7 @@ ET
 2.3656 Tw
 
 BT
-66.24 529.286 Td
+66.24 419.395 Td
 /F1.0 10.5 Tf
 <746f206578706f73652028666f722065672e20> Tj
 ET
@@ -86678,7 +86812,7 @@ ET
 2.3656 Tw
 
 BT
-163.2722 529.286 Td
+163.2722 419.395 Td
 /F1.0 10.5 Tf
 [<4842> 20.0195 <4153452d3138343533>] TJ
 ET
@@ -86693,7 +86827,7 @@ ET
 2.3656 Tw
 
 BT
-230.4825 529.286 Td
+230.4825 419.395 Td
 /F1.0 10.5 Tf
 <20436f6d70616374696f6e526571756573742073686f756c64206e6f74206265206578706f73656420746f2075736572206469726563746c793b> Tj
 ET
@@ -86706,7 +86840,7 @@ ET
 0.2588 0.5451 0.7922 SCN
 
 BT
-66.24 513.506 Td
+66.24 403.615 Td
 /F1.0 10.5 Tf
 [<4842> 20.0195 <4153452d3138323938>] TJ
 ET
@@ -86717,7 +86851,7 @@ ET
 0.2 0.2 0.2 SCN
 
 BT
-133.4503 513.506 Td
+133.4503 403.615 Td
 /F1.0 10.5 Tf
 <20526567696f6e536572766572536572766963657320496e7465726661636520636c65616e757020666f72204350206578706f73653b2065746329> Tj
 ET
@@ -86730,7 +86864,7 @@ ET
 0.9756 Tw
 
 BT
-48.24 485.726 Td
+48.24 375.835 Td
 /F1.0 10.5 Tf
 [<54> 29.7852 <6f2075736520636f70726f636573736f727320696e20322e302c20746865792073686f756c642062652072656275696c7420616761696e7374206e657720415049206f746865727769736520746865792077696c6c206661696c20746f206c6f6164>] TJ
 ET
@@ -86743,7 +86877,7 @@ ET
 0.2 0.2 0.2 SCN
 
 BT
-48.24 469.946 Td
+48.24 360.055 Td
 /F1.0 10.5 Tf
 <616e642048426173652070726f6365737365732077696c6c206469652e> Tj
 ET
@@ -86754,7 +86888,7 @@ ET
 0.2 0.2 0.2 SCN
 
 BT
-48.24 442.166 Td
+48.24 332.275 Td
 /F1.0 10.5 Tf
 [<537567676573746564206f72646572206f66206368616e67657320746f2075706772> 20.0195 <6164652074686520636f70726f636573736f72733a>] TJ
 ET
@@ -86767,7 +86901,7 @@ ET
 0.2 0.2 0.2 SCN
 
 BT
-52.6765 414.386 Td
+52.6765 304.495 Td
 /F1.0 10.5 Tf
 <312e> Tj
 ET
@@ -86782,7 +86916,7 @@ ET
 0.4835 Tw
 
 BT
-66.24 414.386 Td
+66.24 304.495 Td
 /F1.0 10.5 Tf
 <4469726563746c7920696d706c656d656e74206f6273657276657220696e746572666163657320696e7374656164206f6620657874656e64696e6720426173652a4f6273657276657220636c61737365732e204368616e676520> Tj
 ET
@@ -86797,7 +86931,7 @@ ET
 0.4835 Tw
 
 BT
-531.29 414.386 Td
+531.29 304.495 Td
 /F4.0 10.5 Tf
 <466f6f> Tj
 ET
@@ -86810,7 +86944,7 @@ ET
 0.6941 0.1294 0.2745 SCN
 
 BT
-66.24 398.606 Td
+66.24 288.715 Td
 /F4.0 10.5 Tf
 <657874656e647320426173655858584f62736572766572> Tj
 ET
@@ -86821,7 +86955,7 @@ ET
 0.2 0.2 0.2 SCN
 
 BT
-186.99 398.606 Td
+186.99 288.715 Td
 /F1.0 10.5 Tf
 <20746f20> Tj
 ET
@@ -86832,7 +86966,7 @@ ET
 0.6941 0.1294 0.2745 SCN
 
 BT
-202.1835 398.606 Td
+202.1835 288.715 Td
 /F4.0 10.5 Tf
 <466f6f20696d706c656d656e7473205858584f62736572766572> Tj
 ET
@@ -86843,7 +86977,7 @@ ET
 0.2 0.2 0.2 SCN
 
 BT
-338.6835 398.606 Td
+338.6835 288.715 Td
 /F1.0 10.5 Tf
 <2e2028> Tj
 ET
@@ -86854,7 +86988,7 @@ ET
 0.2588 0.5451 0.7922 SCN
 
 BT
-347.661 398.606 Td
+347.661 288.715 Td
 /F1.0 10.5 Tf
 [<4842> 20.0195 <4153452d3137333132>] TJ
 ET
@@ -86865,7 +86999,7 @@ ET
 0.2 0.2 0.2 SCN
 
 BT
-414.8713 398.606 Td
+414.8713 288.715 Td
 /F1.0 10.5 Tf
 <292e> Tj
 ET
@@ -86878,7 +87012,7 @@ ET
 0.2 0.2 0.2 SCN
 
 BT
-52.6765 376.826 Td
+52.6765 266.935 Td
 /F1.0 10.5 Tf
 <322e> Tj
 ET
@@ -86893,7 +87027,7 @@ ET
 3.2087 Tw
 
 BT
-66.24 376.826 Td
+66.24 266.935 Td
 /F1.0 10.5 Tf
 [<41> 20.0195 <6461707420746f2064657369676e206368616e67652066726f6d20496e6865726974656e636520746f20436f6d706f736974696f6e2028>] TJ
 ET
@@ -86908,7 +87042,7 @@ ET
 3.2087 Tw
 
 BT
-380.5127 376.826 Td
+380.5127 266.935 Td
 /F1.0 10.5 Tf
 [<4842> 20.0195 <4153452d3137373332>] TJ
 ET
@@ -86923,7 +87057,7 @@ ET
 3.2087 Tw
 
 BT
-447.723 376.826 Td
+447.723 266.935 Td
 /F1.0 10.5 Tf
 [<292062> 20.0195 <7920666f6c6c6f77696e6720>] TJ
 ET
@@ -86938,7 +87072,7 @@ ET
 3.2087 Tw
 
 BT
-528.602 376.826 Td
+528.602 266.935 Td
 /F1.0 10.5 Tf
 <74686973> Tj
 ET
@@ -86951,7 +87085,7 @@ ET
 0.2588 0.5451 0.7922 SCN
 
 BT
-66.24 361.046 Td
+66.24 251.155 Td
 /F1.0 10.5 Tf
 <6578616d706c65> Tj
 ET
@@ -86962,7 +87096,7 @@ ET
 0.2 0.2 0.2 SCN
 
 BT
-109.0485 361.046 Td
+109.0485 251.155 Td
 /F1.0 10.5 Tf
 <2e> Tj
 ET
@@ -86975,7 +87109,7 @@ ET
 0.2 0.2 0.2 SCN
 
 BT
-52.6765 339.266 Td
+52.6765 229.375 Td
 /F1.0 10.5 Tf
 <332e> Tj
 ET
@@ -86990,7 +87124,7 @@ ET
 3.3416 Tw
 
 BT
-66.24 339.266 Td
+66.24 229.375 Td
 /F1.0 10.5 Tf
 [<67657454> 29.7852 <61626c65282920686173206265656e2072656d6f7665642066726f6d2074686520436f70726f636573736f72456e7672696f6e6d656e742c20636f70726f636573736f72732073686f756c642073656c662d>] TJ
 ET
@@ -87003,7 +87137,7 @@ ET
 0.2 0.2 0.2 SCN
 
 BT
-66.24 323.486 Td
+66.24 213.595 Td
 /F1.0 10.5 Tf
 [<6d616e6167652054> 29.7852 <61626c6520696e7374616e6365732e>] TJ
 ET
@@ -87014,7 +87148,7 @@ ET
 0.2 0.2 0.2 SCN
 
 BT
-48.24 295.706 Td
+48.24 185.815 Td
 /F1.0 10.5 Tf
 <536f6d65206578616d706c6573206f662077726974696e6720636f70726f636573736f72732077697468206e6577204150492063616e20626520666f756e6420696e2068626173652d6578616d706c65206d6f64756c6520> Tj
 ET
@@ -87025,7 +87159,7 @@ ET
 0.2588 0.5451 0.7922 SCN
 
 BT
-517.4745 295.706 Td
+517.4745 185.815 Td
 /F1.0 10.5 Tf
 <68657265> Tj
 ET
@@ -87036,7 +87170,7 @@ ET
 0.2 0.2 0.2 SCN
 
 BT
-540.312 295.706 Td
+540.312 185.815 Td
 /F1.0 10.5 Tf
 <202e> Tj
 ET
@@ -87049,7 +87183,7 @@ ET
 0.7569 Tw
 
 BT
-48.24 267.926 Td
+48.24 158.035 Td
 /F1.0 10.5 Tf
 [<4c6173746c79> 89.8438 <2c20696620616e2061706920686173206265656e206368616e6765642f72656d6f766564207468617420627265616b7320796f7520696e20616e2069727265706172> 20.0195 <61626c65207761> 20.0195 <79> 89.8438 <2c20616e64206966207468657265d5732061>] TJ
 ET
@@ -87062,7 +87196,7 @@ ET
 0.2 0.2 0.2 SCN
 
 BT
-48.24 252.146 Td
+48.24 142.255 Td
 /F1.0 10.5 Tf
 <676f6f64206a757374696669636174696f6e20746f20616464206974206261636b2c206272696e67206974206f7572206e6f746963652028> Tj
 ET
@@ -87073,7 +87207,7 @@ ET
 0.2588 0.5451 0.7922 SCN
 
 BT
-307.3695 252.146 Td
+307.3695 142.255 Td
 /F1.0 10.5 Tf
 <6465764068626173652e6170616368652e6f7267> Tj
 ET
@@ -87084,7 +87218,7 @@ ET
 0.2 0.2 0.2 SCN
 
 BT
-422.0925 252.146 Td
+422.0925 142.255 Td
 /F1.0 10.5 Tf
 <292e> Tj
 ET
@@ -87095,7 +87229,7 @@ ET
 0.2 0.2 0.2 SCN
 
 BT
-48.24 217.646 Td
+48.24 107.755 Td
 /F2.0 13 Tf
 [<31332e312e332e20526f6c6c696e672055706772> 20.0195 <6164652066726f6d20312e7820746f20322e78>] TJ
 ET
@@ -87108,7 +87242,7 @@ ET
 1.0492 Tw
 
 BT
-48.24 191.086 Td
+48.24 81.195 Td
 /F1.0 10.5 Tf
 [<5468657265206973206e6f20726f6c6c696e672075706772> 20.0195 <6164652066726f6d20484261736520312e782b20746f20484261736520322e782b2e20496e206f7264657220746f20706572666f726d2061207a65726f20646f776e74696d65>] TJ
 ET
@@ -87123,7 +87257,7 @@ ET
 1.2123 Tw
 
 BT
-48.24 175.306 Td
+48.24 65.415 Td
 /F1.0 10.5 Tf
 [<75706772> 20.0195 <6164652c20796f752077696c6c206e65656420746f2072756e20616e206164646974696f6e616c20636c757374657220696e20706172> 20.0195 <616c6c656c20616e642068616e646c65206661696c6f76657220696e206170706c69636174696f6e>] TJ
 ET
@@ -87132,65 +87266,6 @@ ET
 0.0 Tw
 0.0 0.0 0.0 SCN
 0.0 0.0 0.0 scn
-0.2 0.2 0.2 scn
-0.2 0.2 0.2 SCN
-
-BT
-48.24 159.526 Td
-/F1.0 10.5 Tf
-<6c6f6769632e> Tj
-ET
-
-0.0 0.0 0.0 SCN
-0.0 0.0 0.0 scn
-0.2 0.2 0.2 scn
-0.2 0.2 0.2 SCN
-
-BT
-48.24 125.026 Td
-/F2.0 13 Tf
-[<31332e312e342e2055706772> 20.0195 <6164652070726f636573732066726f6d20312e7820746f20322e78>] TJ
-ET
-
-0.0 0.0 0.0 SCN
-0.0 0.0 0.0 scn
-0.2 0.2 0.2 scn
-0.2 0.2 0.2 SCN
-
-BT
-48.24 98.466 Td
-/F1.0 10.5 Tf
-[<54> 29.7852 <6f2075706772> 20.0195 <61646520616e206578697374696e6720484261736520312e7820636c75737465722c20796f752073686f756c643a>] TJ
-ET
-
-0.0 0.0 0.0 SCN
-0.0 0.0 0.0 scn
-
--0.5 Tc
-0.2 0.2 0.2 scn
-0.2 0.2 0.2 SCN
-
-BT
-56.8805 70.686 Td
-/F1.0 10.5 Tf
-<a5> Tj
-ET
-
-0.0 0.0 0.0 SCN
-0.0 0.0 0.0 scn
-
-0.0 Tc
-0.2 0.2 0.2 scn
-0.2 0.2 0.2 SCN
-
-BT
-66.24 70.686 Td
-/F1.0 10.5 Tf
-<436c65616e2073687574646f776e206f66206578697374696e6720312e7820636c7573746572> Tj
-ET
-
-0.0 0.0 0.0 SCN
-0.0 0.0 0.0 scn
 q
 0.0 0.0 0.0 scn
 0.0 0.0 0.0 SCN
@@ -87226,13 +87301,14 @@ endobj
 /Contents 731 0 R
 /Resources << /ProcSet [/PDF /Text /ImageB /ImageC /ImageI]
 /Font << /F1.0 10 0 R
+/F3.0 33 0 R
 /F2.0 29 0 R
 /F4.0 35 0 R
 >>
-/XObject << /Stamp1 4614 0 R
+/XObject << /Stamp1 4632 0 R
 >>
 >>
-/Annots [734 0 R 735 0 R 736 0 R 737 0 R 738 0 R 739 0 R 740 0 R 741 0 R 742 0 R 743 0 R 744 0 R 745 0 R 746 0 R 747 0 R]
+/Annots [734 0 R 735 0 R 736 0 R 738 0 R 739 0 R 740 0 R 741 0 R 742 0 R 743 0 R 744 0 R 745 0 R 746 0 R 747 0 R 748 0 R 749 0 R 750 0 R 751 0 R]
 >>
 endobj
 733 0 obj
@@ -87240,193 +87316,227 @@ endobj
 endobj
 734 0 obj
 << /Border [0 0 0]
+/Dest (performance)
+/Subtype /Link
+/Rect [509.8595 696.749 547.04 711.029]
+/Type /Annot
+>>
+endobj
+735 0 obj
+<< /Border [0 0 0]
+/Dest (performance)
+/Subtype /Link
+/Rect [48.24 680.969 187.7812 695.249]
+/Type /Annot
+>>
+endobj
+736 0 obj
+<< /Border [0 0 0]
+/A << /Type /Action
+/S /URI
+/URI (https://issues.

<TRUNCATED>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.QuotaSnapshotSizeSerializationException.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.QuotaSnapshotSizeSerializationException.html b/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.QuotaSnapshotSizeSerializationException.html
index e080cd6..73a1036 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.QuotaSnapshotSizeSerializationException.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.QuotaSnapshotSizeSerializationException.html
@@ -42,604 +42,605 @@
 <span class="sourceLineNo">034</span>import java.util.function.Predicate;<a name="line.34"></a>
 <span class="sourceLineNo">035</span>import java.util.stream.Collectors;<a name="line.35"></a>
 <span class="sourceLineNo">036</span><a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.commons.lang.builder.HashCodeBuilder;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.commons.logging.Log;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.commons.logging.LogFactory;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.hadoop.conf.Configuration;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.fs.FileStatus;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.fs.FileSystem;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.fs.Path;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.TableName;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.client.Connection;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.client.Get;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.client.Put;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.client.Result;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.client.Table;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.util.HFileArchiveUtil;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.util.StringUtils;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.55"></a>
-<span class="sourceLineNo">056</span><a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hbase.thirdparty.com.google.common.collect.HashMultimap;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;<a name="line.59"></a>
-<span class="sourceLineNo">060</span><a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile;<a name="line.65"></a>
-<span class="sourceLineNo">066</span><a name="line.66"></a>
-<span class="sourceLineNo">067</span>/**<a name="line.67"></a>
-<span class="sourceLineNo">068</span> * Tracks file archiving and updates the hbase quota table.<a name="line.68"></a>
-<span class="sourceLineNo">069</span> */<a name="line.69"></a>
-<span class="sourceLineNo">070</span>@InterfaceAudience.Private<a name="line.70"></a>
-<span class="sourceLineNo">071</span>public class FileArchiverNotifierImpl implements FileArchiverNotifier {<a name="line.71"></a>
-<span class="sourceLineNo">072</span>  private static final Log LOG = LogFactory.getLog(FileArchiverNotifierImpl.class);<a name="line.72"></a>
-<span class="sourceLineNo">073</span>  private final Connection conn;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>  private final Configuration conf;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>  private final FileSystem fs;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>  private final TableName tn;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>  private final ReadLock readLock;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>  private final WriteLock writeLock;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>  private volatile long lastFullCompute = Long.MIN_VALUE;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>  private List&lt;String&gt; currentSnapshots = Collections.emptyList();<a name="line.80"></a>
-<span class="sourceLineNo">081</span>  private static final Map&lt;String,Object&gt; NAMESPACE_LOCKS = new HashMap&lt;&gt;();<a name="line.81"></a>
-<span class="sourceLineNo">082</span><a name="line.82"></a>
-<span class="sourceLineNo">083</span>  /**<a name="line.83"></a>
-<span class="sourceLineNo">084</span>   * An Exception thrown when SnapshotSize updates to hbase:quota fail to be written.<a name="line.84"></a>
-<span class="sourceLineNo">085</span>   */<a name="line.85"></a>
-<span class="sourceLineNo">086</span>  @InterfaceAudience.Private<a name="line.86"></a>
-<span class="sourceLineNo">087</span>  public static class QuotaSnapshotSizeSerializationException extends IOException {<a name="line.87"></a>
-<span class="sourceLineNo">088</span>    private static final long serialVersionUID = 1L;<a name="line.88"></a>
-<span class="sourceLineNo">089</span><a name="line.89"></a>
-<span class="sourceLineNo">090</span>    public QuotaSnapshotSizeSerializationException(String msg) {<a name="line.90"></a>
-<span class="sourceLineNo">091</span>      super(msg);<a name="line.91"></a>
-<span class="sourceLineNo">092</span>    }<a name="line.92"></a>
-<span class="sourceLineNo">093</span>  }<a name="line.93"></a>
-<span class="sourceLineNo">094</span><a name="line.94"></a>
-<span class="sourceLineNo">095</span>  public FileArchiverNotifierImpl(<a name="line.95"></a>
-<span class="sourceLineNo">096</span>      Connection conn, Configuration conf, FileSystem fs, TableName tn) {<a name="line.96"></a>
-<span class="sourceLineNo">097</span>    this.conn = conn;<a name="line.97"></a>
-<span class="sourceLineNo">098</span>    this.conf = conf;<a name="line.98"></a>
-<span class="sourceLineNo">099</span>    this.fs = fs;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>    this.tn = tn;<a name="line.100"></a>
-<span class="sourceLineNo">101</span>    ReentrantReadWriteLock lock = new ReentrantReadWriteLock();<a name="line.101"></a>
-<span class="sourceLineNo">102</span>    readLock = lock.readLock();<a name="line.102"></a>
-<span class="sourceLineNo">103</span>    writeLock = lock.writeLock();<a name="line.103"></a>
-<span class="sourceLineNo">104</span>  }<a name="line.104"></a>
-<span class="sourceLineNo">105</span><a name="line.105"></a>
-<span class="sourceLineNo">106</span>  static synchronized Object getLockForNamespace(String namespace) {<a name="line.106"></a>
-<span class="sourceLineNo">107</span>    return NAMESPACE_LOCKS.computeIfAbsent(namespace, (ns) -&gt; new Object());<a name="line.107"></a>
-<span class="sourceLineNo">108</span>  }<a name="line.108"></a>
-<span class="sourceLineNo">109</span><a name="line.109"></a>
-<span class="sourceLineNo">110</span>  /**<a name="line.110"></a>
-<span class="sourceLineNo">111</span>   * Returns a strictly-increasing measure of time extracted by {@link System#nanoTime()}.<a name="line.111"></a>
-<span class="sourceLineNo">112</span>   */<a name="line.112"></a>
-<span class="sourceLineNo">113</span>  long getLastFullCompute() {<a name="line.113"></a>
-<span class="sourceLineNo">114</span>    return lastFullCompute;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>  }<a name="line.115"></a>
-<span class="sourceLineNo">116</span><a name="line.116"></a>
-<span class="sourceLineNo">117</span>  @Override<a name="line.117"></a>
-<span class="sourceLineNo">118</span>  public void addArchivedFiles(Set&lt;Entry&lt;String, Long&gt;&gt; fileSizes) throws IOException {<a name="line.118"></a>
-<span class="sourceLineNo">119</span>    long start = System.nanoTime();<a name="line.119"></a>
-<span class="sourceLineNo">120</span>    readLock.lock();<a name="line.120"></a>
-<span class="sourceLineNo">121</span>    try {<a name="line.121"></a>
-<span class="sourceLineNo">122</span>      // We want to catch the case where we got an archival request, but there was a full<a name="line.122"></a>
-<span class="sourceLineNo">123</span>      // re-computation in progress that was blocking us. Most likely, the full computation is going<a name="line.123"></a>
-<span class="sourceLineNo">124</span>      // to already include the changes we were going to make.<a name="line.124"></a>
-<span class="sourceLineNo">125</span>      //<a name="line.125"></a>
-<span class="sourceLineNo">126</span>      // Same as "start &lt; lastFullCompute" but avoiding numeric overflow per the<a name="line.126"></a>
-<span class="sourceLineNo">127</span>      // System.nanoTime() javadoc<a name="line.127"></a>
-<span class="sourceLineNo">128</span>      if (lastFullCompute != Long.MIN_VALUE &amp;&amp; start - lastFullCompute &lt; 0) {<a name="line.128"></a>
-<span class="sourceLineNo">129</span>        if (LOG.isTraceEnabled()) {<a name="line.129"></a>
-<span class="sourceLineNo">130</span>          LOG.trace("A full computation was performed after this request was received."<a name="line.130"></a>
-<span class="sourceLineNo">131</span>              + " Ignoring requested updates: " + fileSizes);<a name="line.131"></a>
-<span class="sourceLineNo">132</span>        }<a name="line.132"></a>
-<span class="sourceLineNo">133</span>        return;<a name="line.133"></a>
-<span class="sourceLineNo">134</span>      }<a name="line.134"></a>
-<span class="sourceLineNo">135</span><a name="line.135"></a>
-<span class="sourceLineNo">136</span>      if (LOG.isTraceEnabled()) {<a name="line.136"></a>
-<span class="sourceLineNo">137</span>        LOG.trace("currentSnapshots: " + currentSnapshots + " fileSize: "+ fileSizes);<a name="line.137"></a>
-<span class="sourceLineNo">138</span>      }<a name="line.138"></a>
-<span class="sourceLineNo">139</span><a name="line.139"></a>
-<span class="sourceLineNo">140</span>      // Write increment to quota table for the correct snapshot. Only do this if we have snapshots<a name="line.140"></a>
-<span class="sourceLineNo">141</span>      // and some files that were archived.<a name="line.141"></a>
-<span class="sourceLineNo">142</span>      if (!currentSnapshots.isEmpty() &amp;&amp; !fileSizes.isEmpty()) {<a name="line.142"></a>
-<span class="sourceLineNo">143</span>        // We get back the files which no snapshot referenced (the files which will be deleted soon)<a name="line.143"></a>
-<span class="sourceLineNo">144</span>        groupArchivedFiledBySnapshotAndRecordSize(currentSnapshots, fileSizes);<a name="line.144"></a>
-<span class="sourceLineNo">145</span>      }<a name="line.145"></a>
-<span class="sourceLineNo">146</span>    } finally {<a name="line.146"></a>
-<span class="sourceLineNo">147</span>      readLock.unlock();<a name="line.147"></a>
-<span class="sourceLineNo">148</span>    }<a name="line.148"></a>
-<span class="sourceLineNo">149</span>  }<a name="line.149"></a>
-<span class="sourceLineNo">150</span><a name="line.150"></a>
-<span class="sourceLineNo">151</span>  /**<a name="line.151"></a>
-<span class="sourceLineNo">152</span>   * For each file in the map, this updates the first snapshot (lexicographic snapshot name) that<a name="line.152"></a>
-<span class="sourceLineNo">153</span>   * references this file. The result of this computation is serialized to the quota table.<a name="line.153"></a>
-<span class="sourceLineNo">154</span>   *<a name="line.154"></a>
-<span class="sourceLineNo">155</span>   * @param snapshots A collection of HBase snapshots to group the files into<a name="line.155"></a>
-<span class="sourceLineNo">156</span>   * @param fileSizes A map of file names to their sizes<a name="line.156"></a>
-<span class="sourceLineNo">157</span>   */<a name="line.157"></a>
-<span class="sourceLineNo">158</span>  void groupArchivedFiledBySnapshotAndRecordSize(<a name="line.158"></a>
-<span class="sourceLineNo">159</span>      List&lt;String&gt; snapshots, Set&lt;Entry&lt;String, Long&gt;&gt; fileSizes) throws IOException {<a name="line.159"></a>
-<span class="sourceLineNo">160</span>    // Make a copy as we'll modify it.<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    final Map&lt;String,Long&gt; filesToUpdate = new HashMap&lt;&gt;(fileSizes.size());<a name="line.161"></a>
-<span class="sourceLineNo">162</span>    for (Entry&lt;String,Long&gt; entry : fileSizes) {<a name="line.162"></a>
-<span class="sourceLineNo">163</span>      filesToUpdate.put(entry.getKey(), entry.getValue());<a name="line.163"></a>
-<span class="sourceLineNo">164</span>    }<a name="line.164"></a>
-<span class="sourceLineNo">165</span>    // Track the change in size to each snapshot<a name="line.165"></a>
-<span class="sourceLineNo">166</span>    final Map&lt;String,Long&gt; snapshotSizeChanges = new HashMap&lt;&gt;();<a name="line.166"></a>
-<span class="sourceLineNo">167</span>    for (String snapshot : snapshots) {<a name="line.167"></a>
-<span class="sourceLineNo">168</span>      // For each file in `filesToUpdate`, check if `snapshot` refers to it.<a name="line.168"></a>
-<span class="sourceLineNo">169</span>      // If `snapshot` does, remove it from `filesToUpdate` and add it to `snapshotSizeChanges`.<a name="line.169"></a>
-<span class="sourceLineNo">170</span>      bucketFilesToSnapshot(snapshot, filesToUpdate, snapshotSizeChanges);<a name="line.170"></a>
-<span class="sourceLineNo">171</span>      if (filesToUpdate.isEmpty()) {<a name="line.171"></a>
-<span class="sourceLineNo">172</span>        // If we have no more files recently archived, we have nothing more to check<a name="line.172"></a>
-<span class="sourceLineNo">173</span>        break;<a name="line.173"></a>
-<span class="sourceLineNo">174</span>      }<a name="line.174"></a>
-<span class="sourceLineNo">175</span>    }<a name="line.175"></a>
-<span class="sourceLineNo">176</span>    // We have computed changes to the snapshot size, we need to record them.<a name="line.176"></a>
-<span class="sourceLineNo">177</span>    if (!snapshotSizeChanges.isEmpty()) {<a name="line.177"></a>
-<span class="sourceLineNo">178</span>      if (LOG.isTraceEnabled()) {<a name="line.178"></a>
-<span class="sourceLineNo">179</span>        LOG.trace("Writing snapshot size changes for: " + snapshotSizeChanges);<a name="line.179"></a>
-<span class="sourceLineNo">180</span>      }<a name="line.180"></a>
-<span class="sourceLineNo">181</span>      persistSnapshotSizeChanges(snapshotSizeChanges);<a name="line.181"></a>
-<span class="sourceLineNo">182</span>    }<a name="line.182"></a>
-<span class="sourceLineNo">183</span>  }<a name="line.183"></a>
-<span class="sourceLineNo">184</span><a name="line.184"></a>
-<span class="sourceLineNo">185</span>  /**<a name="line.185"></a>
-<span class="sourceLineNo">186</span>   * For the given snapshot, find all files which this {@code snapshotName} references. After a file<a name="line.186"></a>
-<span class="sourceLineNo">187</span>   * is found to be referenced by the snapshot, it is removed from {@code filesToUpdate} and<a name="line.187"></a>
-<span class="sourceLineNo">188</span>   * {@code snapshotSizeChanges} is updated in concert.<a name="line.188"></a>
-<span class="sourceLineNo">189</span>   *<a name="line.189"></a>
-<span class="sourceLineNo">190</span>   * @param snapshotName The snapshot to check<a name="line.190"></a>
-<span class="sourceLineNo">191</span>   * @param filesToUpdate A mapping of archived files to their size<a name="line.191"></a>
-<span class="sourceLineNo">192</span>   * @param snapshotSizeChanges A mapping of snapshots and their change in size<a name="line.192"></a>
-<span class="sourceLineNo">193</span>   */<a name="line.193"></a>
-<span class="sourceLineNo">194</span>  void bucketFilesToSnapshot(<a name="line.194"></a>
-<span class="sourceLineNo">195</span>      String snapshotName, Map&lt;String,Long&gt; filesToUpdate, Map&lt;String,Long&gt; snapshotSizeChanges)<a name="line.195"></a>
-<span class="sourceLineNo">196</span>          throws IOException {<a name="line.196"></a>
-<span class="sourceLineNo">197</span>    // A quick check to avoid doing work if the caller unnecessarily invoked this method.<a name="line.197"></a>
-<span class="sourceLineNo">198</span>    if (filesToUpdate.isEmpty()) {<a name="line.198"></a>
-<span class="sourceLineNo">199</span>      return;<a name="line.199"></a>
-<span class="sourceLineNo">200</span>    }<a name="line.200"></a>
-<span class="sourceLineNo">201</span><a name="line.201"></a>
-<span class="sourceLineNo">202</span>    Path snapshotDir = SnapshotDescriptionUtils.getCompletedSnapshotDir(<a name="line.202"></a>
-<span class="sourceLineNo">203</span>        snapshotName, FSUtils.getRootDir(conf));<a name="line.203"></a>
-<span class="sourceLineNo">204</span>    SnapshotDescription sd = SnapshotDescriptionUtils.readSnapshotInfo(fs, snapshotDir);<a name="line.204"></a>
-<span class="sourceLineNo">205</span>    SnapshotManifest manifest = SnapshotManifest.open(conf, fs, snapshotDir, sd);<a name="line.205"></a>
-<span class="sourceLineNo">206</span>    // For each region referenced by the snapshot<a name="line.206"></a>
-<span class="sourceLineNo">207</span>    for (SnapshotRegionManifest rm : manifest.getRegionManifests()) {<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      // For each column family in this region<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      for (FamilyFiles ff : rm.getFamilyFilesList()) {<a name="line.209"></a>
-<span class="sourceLineNo">210</span>        // And each store file in that family<a name="line.210"></a>
-<span class="sourceLineNo">211</span>        for (StoreFile sf : ff.getStoreFilesList()) {<a name="line.211"></a>
-<span class="sourceLineNo">212</span>          Long valueOrNull = filesToUpdate.remove(sf.getName());<a name="line.212"></a>
-<span class="sourceLineNo">213</span>          if (valueOrNull != null) {<a name="line.213"></a>
-<span class="sourceLineNo">214</span>            // This storefile was recently archived, we should update this snapshot with its size<a name="line.214"></a>
-<span class="sourceLineNo">215</span>            snapshotSizeChanges.merge(snapshotName, valueOrNull, Long::sum);<a name="line.215"></a>
-<span class="sourceLineNo">216</span>          }<a name="line.216"></a>
-<span class="sourceLineNo">217</span>          // Short-circuit, if we have no more files that were archived, we don't need to iterate<a name="line.217"></a>
-<span class="sourceLineNo">218</span>          // over the rest of the snapshot.<a name="line.218"></a>
-<span class="sourceLineNo">219</span>          if (filesToUpdate.isEmpty()) {<a name="line.219"></a>
-<span class="sourceLineNo">220</span>            return;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>          }<a name="line.221"></a>
-<span class="sourceLineNo">222</span>        }<a name="line.222"></a>
-<span class="sourceLineNo">223</span>      }<a name="line.223"></a>
-<span class="sourceLineNo">224</span>    }<a name="line.224"></a>
-<span class="sourceLineNo">225</span>  }<a name="line.225"></a>
-<span class="sourceLineNo">226</span><a name="line.226"></a>
-<span class="sourceLineNo">227</span>  /**<a name="line.227"></a>
-<span class="sourceLineNo">228</span>   * Reads the current size for each snapshot to update, generates a new update based on that value,<a name="line.228"></a>
-<span class="sourceLineNo">229</span>   * and then writes the new update.<a name="line.229"></a>
-<span class="sourceLineNo">230</span>   *<a name="line.230"></a>
-<span class="sourceLineNo">231</span>   * @param snapshotSizeChanges A map of snapshot name to size change<a name="line.231"></a>
-<span class="sourceLineNo">232</span>   */<a name="line.232"></a>
-<span class="sourceLineNo">233</span>  void persistSnapshotSizeChanges(Map&lt;String,Long&gt; snapshotSizeChanges) throws IOException {<a name="line.233"></a>
-<span class="sourceLineNo">234</span>    try (Table quotaTable = conn.getTable(QuotaTableUtil.QUOTA_TABLE_NAME)) {<a name="line.234"></a>
-<span class="sourceLineNo">235</span>      // Create a list (with a more typical ordering implied)<a name="line.235"></a>
-<span class="sourceLineNo">236</span>      final List&lt;Entry&lt;String,Long&gt;&gt; snapshotSizeEntries = new ArrayList&lt;&gt;(<a name="line.236"></a>
-<span class="sourceLineNo">237</span>          snapshotSizeChanges.entrySet());<a name="line.237"></a>
-<span class="sourceLineNo">238</span>      // Create the Gets for each snapshot we need to update<a name="line.238"></a>
-<span class="sourceLineNo">239</span>      final List&lt;Get&gt; snapshotSizeGets = snapshotSizeEntries.stream()<a name="line.239"></a>
-<span class="sourceLineNo">240</span>          .map((e) -&gt; QuotaTableUtil.makeGetForSnapshotSize(tn, e.getKey()))<a name="line.240"></a>
-<span class="sourceLineNo">241</span>          .collect(Collectors.toList());<a name="line.241"></a>
-<span class="sourceLineNo">242</span>      final Iterator&lt;Entry&lt;String,Long&gt;&gt; iterator = snapshotSizeEntries.iterator();<a name="line.242"></a>
-<span class="sourceLineNo">243</span>      // A List to store each Put we'll create from the Get's we retrieve<a name="line.243"></a>
-<span class="sourceLineNo">244</span>      final List&lt;Put&gt; updates = new ArrayList&lt;&gt;(snapshotSizeEntries.size());<a name="line.244"></a>
-<span class="sourceLineNo">245</span><a name="line.245"></a>
-<span class="sourceLineNo">246</span>      // TODO Push this down to the RegionServer with a coprocessor:<a name="line.246"></a>
-<span class="sourceLineNo">247</span>      //<a name="line.247"></a>
-<span class="sourceLineNo">248</span>      // We would really like to piggy-back on the row-lock already being grabbed<a name="line.248"></a>
-<span class="sourceLineNo">249</span>      // to handle the update of the row in the quota table. However, because the value<a name="line.249"></a>
-<span class="sourceLineNo">250</span>      // is a serialized protobuf, the standard Increment API doesn't work for us. With a CP, we<a name="line.250"></a>
-<span class="sourceLineNo">251</span>      // can just send the size deltas to the RS and atomically update the serialized PB object<a name="line.251"></a>
-<span class="sourceLineNo">252</span>      // while relying on the row-lock for synchronization.<a name="line.252"></a>
-<span class="sourceLineNo">253</span>      //<a name="line.253"></a>
-<span class="sourceLineNo">254</span>      // Synchronizing on the namespace string is a "minor smell" but passable as this is<a name="line.254"></a>
-<span class="sourceLineNo">255</span>      // only invoked via a single caller (the active Master). Using the namespace name lets us<a name="line.255"></a>
-<span class="sourceLineNo">256</span>      // have some parallelism without worry of on caller seeing stale data from the quota table.<a name="line.256"></a>
-<span class="sourceLineNo">257</span>      synchronized (getLockForNamespace(tn.getNamespaceAsString())) {<a name="line.257"></a>
-<span class="sourceLineNo">258</span>        final Result[] existingSnapshotSizes = quotaTable.get(snapshotSizeGets);<a name="line.258"></a>
-<span class="sourceLineNo">259</span>        long totalSizeChange = 0;<a name="line.259"></a>
-<span class="sourceLineNo">260</span>        // Read the current size values (if they exist) to generate the new value<a name="line.260"></a>
-<span class="sourceLineNo">261</span>        for (Result result : existingSnapshotSizes) {<a name="line.261"></a>
-<span class="sourceLineNo">262</span>          Entry&lt;String,Long&gt; entry = iterator.next();<a name="line.262"></a>
-<span class="sourceLineNo">263</span>          String snapshot = entry.getKey();<a name="line.263"></a>
-<span class="sourceLineNo">264</span>          Long size = entry.getValue();<a name="line.264"></a>
-<span class="sourceLineNo">265</span>          // Track the total size change for the namespace this table belongs in<a name="line.265"></a>
-<span class="sourceLineNo">266</span>          totalSizeChange += size;<a name="line.266"></a>
-<span class="sourceLineNo">267</span>          // Get the size of the previous value (or zero)<a name="line.267"></a>
-<span class="sourceLineNo">268</span>          long previousSize = getSnapshotSizeFromResult(result);<a name="line.268"></a>
-<span class="sourceLineNo">269</span>          // Create an update. A file was archived from the table, so the table's size goes<a name="line.269"></a>
-<span class="sourceLineNo">270</span>          // down, but the snapshot's size goes up.<a name="line.270"></a>
-<span class="sourceLineNo">271</span>          updates.add(QuotaTableUtil.createPutForSnapshotSize(tn, snapshot, previousSize + size));<a name="line.271"></a>
-<span class="sourceLineNo">272</span>        }<a name="line.272"></a>
-<span class="sourceLineNo">273</span><a name="line.273"></a>
-<span class="sourceLineNo">274</span>        // Create an update for the summation of all snapshots in the namespace<a name="line.274"></a>
-<span class="sourceLineNo">275</span>        if (totalSizeChange != 0) {<a name="line.275"></a>
-<span class="sourceLineNo">276</span>          long previousSize = getPreviousNamespaceSnapshotSize(<a name="line.276"></a>
-<span class="sourceLineNo">277</span>              quotaTable, tn.getNamespaceAsString());<a name="line.277"></a>
-<span class="sourceLineNo">278</span>          updates.add(QuotaTableUtil.createPutForNamespaceSnapshotSize(<a name="line.278"></a>
-<span class="sourceLineNo">279</span>              tn.getNamespaceAsString(), previousSize + totalSizeChange));<a name="line.279"></a>
-<span class="sourceLineNo">280</span>        }<a name="line.280"></a>
-<span class="sourceLineNo">281</span><a name="line.281"></a>
-<span class="sourceLineNo">282</span>        // Send all of the quota table updates in one batch.<a name="line.282"></a>
-<span class="sourceLineNo">283</span>        List&lt;Object&gt; failures = new ArrayList&lt;&gt;();<a name="line.283"></a>
-<span class="sourceLineNo">284</span>        final Object[] results = new Object[updates.size()];<a name="line.284"></a>
-<span class="sourceLineNo">285</span>        quotaTable.batch(updates, results);<a name="line.285"></a>
-<span class="sourceLineNo">286</span>        for (Object result : results) {<a name="line.286"></a>
-<span class="sourceLineNo">287</span>          // A null result is an error condition (all RPC attempts failed)<a name="line.287"></a>
-<span class="sourceLineNo">288</span>          if (!(result instanceof Result)) {<a name="line.288"></a>
-<span class="sourceLineNo">289</span>            failures.add(result);<a name="line.289"></a>
-<span class="sourceLineNo">290</span>          }<a name="line.290"></a>
-<span class="sourceLineNo">291</span>        }<a name="line.291"></a>
-<span class="sourceLineNo">292</span>        // Propagate a failure if any updates failed<a name="line.292"></a>
-<span class="sourceLineNo">293</span>        if (!failures.isEmpty()) {<a name="line.293"></a>
-<span class="sourceLineNo">294</span>          throw new QuotaSnapshotSizeSerializationException(<a name="line.294"></a>
-<span class="sourceLineNo">295</span>              "Failed to write some snapshot size updates: " + failures);<a name="line.295"></a>
-<span class="sourceLineNo">296</span>        }<a name="line.296"></a>
-<span class="sourceLineNo">297</span>      }<a name="line.297"></a>
-<span class="sourceLineNo">298</span>    } catch (InterruptedException e) {<a name="line.298"></a>
-<span class="sourceLineNo">299</span>      Thread.currentThread().interrupt();<a name="line.299"></a>
-<span class="sourceLineNo">300</span>      return;<a name="line.300"></a>
-<span class="sourceLineNo">301</span>    }<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  }<a name="line.302"></a>
-<span class="sourceLineNo">303</span><a name="line.303"></a>
-<span class="sourceLineNo">304</span>  /**<a name="line.304"></a>
-<span class="sourceLineNo">305</span>   * Fetches the current size of all snapshots in the given {@code namespace}.<a name="line.305"></a>
-<span class="sourceLineNo">306</span>   *<a name="line.306"></a>
-<span class="sourceLineNo">307</span>   * @param quotaTable The HBase quota table<a name="line.307"></a>
-<span class="sourceLineNo">308</span>   * @param namespace Namespace to fetch the sum of snapshot sizes for<a name="line.308"></a>
-<span class="sourceLineNo">309</span>   * @return The size of all snapshot sizes for the namespace in bytes.<a name="line.309"></a>
-<span class="sourceLineNo">310</span>   */<a name="line.310"></a>
-<span class="sourceLineNo">311</span>  long getPreviousNamespaceSnapshotSize(Table quotaTable, String namespace) throws IOException {<a name="line.311"></a>
-<span class="sourceLineNo">312</span>    // Update the size of each snapshot for all snapshots in a namespace.<a name="line.312"></a>
-<span class="sourceLineNo">313</span>    Result r = quotaTable.get(<a name="line.313"></a>
-<span class="sourceLineNo">314</span>        QuotaTableUtil.createGetNamespaceSnapshotSize(namespace));<a name="line.314"></a>
-<span class="sourceLineNo">315</span>    return getSnapshotSizeFromResult(r);<a name="line.315"></a>
-<span class="sourceLineNo">316</span>  }<a name="line.316"></a>
-<span class="sourceLineNo">317</span><a name="line.317"></a>
-<span class="sourceLineNo">318</span>  /**<a name="line.318"></a>
-<span class="sourceLineNo">319</span>   * Extracts the size component from a serialized {@link SpaceQuotaSnapshot} protobuf.<a name="line.319"></a>
-<span class="sourceLineNo">320</span>   *<a name="line.320"></a>
-<span class="sourceLineNo">321</span>   * @param r A Result containing one cell with a SpaceQuotaSnapshot protobuf<a name="line.321"></a>
-<span class="sourceLineNo">322</span>   * @return The size in bytes of the snapshot.<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   */<a name="line.323"></a>
-<span class="sourceLineNo">324</span>  long getSnapshotSizeFromResult(Result r) throws InvalidProtocolBufferException {<a name="line.324"></a>
-<span class="sourceLineNo">325</span>    // Per javadoc, Result should only be null if an exception was thrown. So, if we're here,<a name="line.325"></a>
-<span class="sourceLineNo">326</span>    // we should be non-null. If we can't advance to the first cell, same as "no cell".<a name="line.326"></a>
-<span class="sourceLineNo">327</span>    if (!r.isEmpty() &amp;&amp; r.advance()) {<a name="line.327"></a>
-<span class="sourceLineNo">328</span>      return QuotaTableUtil.parseSnapshotSize(r.current());<a name="line.328"></a>
-<span class="sourceLineNo">329</span>    }<a name="line.329"></a>
-<span class="sourceLineNo">330</span>    return 0L;<a name="line.330"></a>
-<span class="sourceLineNo">331</span>  }<a name="line.331"></a>
-<span class="sourceLineNo">332</span><a name="line.332"></a>
-<span class="sourceLineNo">333</span>  @Override<a name="line.333"></a>
-<span class="sourceLineNo">334</span>  public long computeAndStoreSnapshotSizes(<a name="line.334"></a>
-<span class="sourceLineNo">335</span>      Collection&lt;String&gt; currentSnapshots) throws IOException {<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    // Record what the current snapshots are<a name="line.336"></a>
-<span class="sourceLineNo">337</span>    this.currentSnapshots = new ArrayList&lt;&gt;(currentSnapshots);<a name="line.337"></a>
-<span class="sourceLineNo">338</span>    Collections.sort(this.currentSnapshots);<a name="line.338"></a>
-<span class="sourceLineNo">339</span><a name="line.339"></a>
-<span class="sourceLineNo">340</span>    // compute new size for table + snapshots for that table<a name="line.340"></a>
-<span class="sourceLineNo">341</span>    List&lt;SnapshotWithSize&gt; snapshotSizes = computeSnapshotSizes(this.currentSnapshots);<a name="line.341"></a>
-<span class="sourceLineNo">342</span>    if (LOG.isTraceEnabled()) {<a name="line.342"></a>
-<span class="sourceLineNo">343</span>      LOG.trace("Computed snapshot sizes for " + tn + " of " + snapshotSizes);<a name="line.343"></a>
-<span class="sourceLineNo">344</span>    }<a name="line.344"></a>
-<span class="sourceLineNo">345</span><a name="line.345"></a>
-<span class="sourceLineNo">346</span>    // Compute the total size of all snapshots against our table<a name="line.346"></a>
-<span class="sourceLineNo">347</span>    final long totalSnapshotSize = snapshotSizes.stream().mapToLong((sws) -&gt; sws.getSize()).sum();<a name="line.347"></a>
-<span class="sourceLineNo">348</span><a name="line.348"></a>
-<span class="sourceLineNo">349</span>    writeLock.lock();<a name="line.349"></a>
-<span class="sourceLineNo">350</span>    try {<a name="line.350"></a>
-<span class="sourceLineNo">351</span>      // Persist the size of each snapshot<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      try (Table quotaTable = conn.getTable(QuotaTableUtil.QUOTA_TABLE_NAME)) {<a name="line.352"></a>
-<span class="sourceLineNo">353</span>        persistSnapshotSizes(quotaTable, snapshotSizes);<a name="line.353"></a>
-<span class="sourceLineNo">354</span>      }<a name="line.354"></a>
-<span class="sourceLineNo">355</span><a name="line.355"></a>
-<span class="sourceLineNo">356</span>      // Report the last time we did a recomputation<a name="line.356"></a>
-<span class="sourceLineNo">357</span>      lastFullCompute = System.nanoTime();<a name="line.357"></a>
-<span class="sourceLineNo">358</span><a name="line.358"></a>
-<span class="sourceLineNo">359</span>      return totalSnapshotSize;<a name="line.359"></a>
-<span class="sourceLineNo">360</span>    } finally {<a name="line.360"></a>
-<span class="sourceLineNo">361</span>      writeLock.unlock();<a name="line.361"></a>
-<span class="sourceLineNo">362</span>    }<a name="line.362"></a>
-<span class="sourceLineNo">363</span>  }<a name="line.363"></a>
-<span class="sourceLineNo">364</span><a name="line.364"></a>
-<span class="sourceLineNo">365</span>  @Override<a name="line.365"></a>
-<span class="sourceLineNo">366</span>  public String toString() {<a name="line.366"></a>
-<span class="sourceLineNo">367</span>    StringBuilder sb = new StringBuilder();<a name="line.367"></a>
-<span class="sourceLineNo">368</span>    sb.append(getClass().getSimpleName()).append("[");<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    sb.append("tableName=").append(tn).append(", currentSnapshots=");<a name="line.369"></a>
-<span class="sourceLineNo">370</span>    sb.append(currentSnapshots).append(", lastFullCompute=").append(lastFullCompute);<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    return sb.append("]").toString();<a name="line.371"></a>
-<span class="sourceLineNo">372</span>  }<a name="line.372"></a>
-<span class="sourceLineNo">373</span><a name="line.373"></a>
-<span class="sourceLineNo">374</span>  /**<a name="line.374"></a>
-<span class="sourceLineNo">375</span>   * Computes the size of each snapshot against the table referenced by {@code this}.<a name="line.375"></a>
-<span class="sourceLineNo">376</span>   *<a name="line.376"></a>
-<span class="sourceLineNo">377</span>   * @param snapshots A sorted list of snapshots against {@code tn}.<a name="line.377"></a>
-<span class="sourceLineNo">378</span>   * @return A list of the size for each snapshot against {@code tn}.<a name="line.378"></a>
-<span class="sourceLineNo">379</span>   */<a name="line.379"></a>
-<span class="sourceLineNo">380</span>  List&lt;SnapshotWithSize&gt; computeSnapshotSizes(List&lt;String&gt; snapshots) throws IOException {<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    final List&lt;SnapshotWithSize&gt; snapshotSizes = new ArrayList&lt;&gt;(snapshots.size());<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    final Path rootDir = FSUtils.getRootDir(conf);<a name="line.382"></a>
-<span class="sourceLineNo">383</span><a name="line.383"></a>
-<span class="sourceLineNo">384</span>    // Get the map of store file names to store file path for this table<a name="line.384"></a>
-<span class="sourceLineNo">385</span>    final Set&lt;String&gt; tableReferencedStoreFiles;<a name="line.385"></a>
-<span class="sourceLineNo">386</span>    try {<a name="line.386"></a>
-<span class="sourceLineNo">387</span>      tableReferencedStoreFiles = FSUtils.getTableStoreFilePathMap(fs, rootDir).keySet();<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    } catch (InterruptedException e) {<a name="line.388"></a>
-<span class="sourceLineNo">389</span>      Thread.currentThread().interrupt();<a name="line.389"></a>
-<span class="sourceLineNo">390</span>      return null;<a name="line.390"></a>
-<span class="sourceLineNo">391</span>    }<a name="line.391"></a>
-<span class="sourceLineNo">392</span><a name="line.392"></a>
-<span class="sourceLineNo">393</span>    if (LOG.isTraceEnabled()) {<a name="line.393"></a>
-<span class="sourceLineNo">394</span>      LOG.trace("Paths for " + tn + ": " + tableReferencedStoreFiles);<a name="line.394"></a>
-<span class="sourceLineNo">395</span>    }<a name="line.395"></a>
-<span class="sourceLineNo">396</span><a name="line.396"></a>
-<span class="sourceLineNo">397</span>    // For each snapshot on this table, get the files which the snapshot references which<a name="line.397"></a>
-<span class="sourceLineNo">398</span>    // the table does not.<a name="line.398"></a>
-<span class="sourceLineNo">399</span>    Set&lt;String&gt; snapshotReferencedFiles = new HashSet&lt;&gt;();<a name="line.399"></a>
-<span class="sourceLineNo">400</span>    for (String snapshotName : snapshots) {<a name="line.400"></a>
-<span class="sourceLineNo">401</span>      Path snapshotDir = SnapshotDescriptionUtils.getCompletedSnapshotDir(snapshotName, rootDir);<a name="line.401"></a>
-<span class="sourceLineNo">402</span>      SnapshotDescription sd = SnapshotDescriptionUtils.readSnapshotInfo(fs, snapshotDir);<a name="line.402"></a>
-<span class="sourceLineNo">403</span>      SnapshotManifest manifest = SnapshotManifest.open(conf, fs, snapshotDir, sd);<a name="line.403"></a>
-<span class="sourceLineNo">404</span><a name="line.404"></a>
-<span class="sourceLineNo">405</span>      if (LOG.isTraceEnabled()) {<a name="line.405"></a>
-<span class="sourceLineNo">406</span>        LOG.trace("Files referenced by other snapshots: " + snapshotReferencedFiles);<a name="line.406"></a>
-<span class="sourceLineNo">407</span>      }<a name="line.407"></a>
-<span class="sourceLineNo">408</span><a name="line.408"></a>
-<span class="sourceLineNo">409</span>      // Get the set of files from the manifest that this snapshot references which are not also<a name="line.409"></a>
-<span class="sourceLineNo">410</span>      // referenced by the originating table.<a name="line.410"></a>
-<span class="sourceLineNo">411</span>      Set&lt;StoreFileReference&gt; unreferencedStoreFileNames = getStoreFilesFromSnapshot(<a name="line.411"></a>
-<span class="sourceLineNo">412</span>          manifest, (sfn) -&gt; !tableReferencedStoreFiles.contains(sfn)<a name="line.412"></a>
-<span class="sourceLineNo">413</span>              &amp;&amp; !snapshotReferencedFiles.contains(sfn));<a name="line.413"></a>
-<span class="sourceLineNo">414</span><a name="line.414"></a>
-<span class="sourceLineNo">415</span>      if (LOG.isTraceEnabled()) {<a name="line.415"></a>
-<span class="sourceLineNo">416</span>        LOG.trace("Snapshot " + snapshotName + " solely references the files: "<a name="line.416"></a>
-<span class="sourceLineNo">417</span>            + unreferencedStoreFileNames);<a name="line.417"></a>
-<span class="sourceLineNo">418</span>      }<a name="line.418"></a>
-<span class="sourceLineNo">419</span><a name="line.419"></a>
-<span class="sourceLineNo">420</span>      // Compute the size of the store files for this snapshot<a name="line.420"></a>
-<span class="sourceLineNo">421</span>      long size = getSizeOfStoreFiles(tn, unreferencedStoreFileNames);<a name="line.421"></a>
-<span class="sourceLineNo">422</span>      if (LOG.isTraceEnabled()) {<a name="line.422"></a>
-<span class="sourceLineNo">423</span>        LOG.trace("Computed size of " + snapshotName + " to be " + size);<a name="line.423"></a>
-<span class="sourceLineNo">424</span>      }<a name="line.424"></a>
-<span class="sourceLineNo">425</span><a name="line.425"></a>
-<span class="sourceLineNo">426</span>      // Persist this snapshot's size into the map<a name="line.426"></a>
-<span class="sourceLineNo">427</span>      snapshotSizes.add(new SnapshotWithSize(snapshotName, size));<a name="line.427"></a>
-<span class="sourceLineNo">428</span><a name="line.428"></a>
-<span class="sourceLineNo">429</span>      // Make sure that we don't double-count the same file<a name="line.429"></a>
-<span class="sourceLineNo">430</span>      for (StoreFileReference ref : unreferencedStoreFileNames) {<a name="line.430"></a>
-<span class="sourceLineNo">431</span>        for (String fileNames : ref.getFamilyToFilesMapping().values()) {<a name="line.431"></a>
-<span class="sourceLineNo">432</span>          snapshotReferencedFiles.add(fileNames);<a name="line.432"></a>
-<span class="sourceLineNo">433</span>        }<a name="line.433"></a>
-<span class="sourceLineNo">434</span>      }<a name="line.434"></a>
-<span class="sourceLineNo">435</span>    }<a name="line.435"></a>
-<span class="sourceLineNo">436</span><a name="line.436"></a>
-<span class="sourceLineNo">437</span>    return snapshotSizes;<a name="line.437"></a>
-<span class="sourceLineNo">438</span>  }<a name="line.438"></a>
-<span class="sourceLineNo">439</span><a name="line.439"></a>
-<span class="sourceLineNo">440</span>  /**<a name="line.440"></a>
-<span class="sourceLineNo">441</span>   * Computes the size of each store file in {@code storeFileNames}<a name="line.441"></a>
-<span class="sourceLineNo">442</span>   */<a name="line.442"></a>
-<span class="sourceLineNo">443</span>  long getSizeOfStoreFiles(TableName tn, Set&lt;StoreFileReference&gt; storeFileNames) {<a name="line.443"></a>
-<span class="sourceLineNo">444</span>    return storeFileNames.stream()<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        .collect(Collectors.summingLong((sfr) -&gt; getSizeOfStoreFile(tn, sfr)));<a name="line.445"></a>
-<span class="sourceLineNo">446</span>  }<a name="line.446"></a>
-<span class="sourceLineNo">447</span><a name="line.447"></a>
-<span class="sourceLineNo">448</span>  /**<a name="line.448"></a>
-<span class="sourceLineNo">449</span>   * Computes the size of the store files for a single region.<a name="line.449"></a>
-<span class="sourceLineNo">450</span>   */<a name="line.450"></a>
-<span class="sourceLineNo">451</span>  long getSizeOfStoreFile(TableName tn, StoreFileReference storeFileName) {<a name="line.451"></a>
-<span class="sourceLineNo">452</span>    String regionName = storeFileName.getRegionName();<a name="line.452"></a>
-<span class="sourceLineNo">453</span>    return storeFileName.getFamilyToFilesMapping()<a name="line.453"></a>
-<span class="sourceLineNo">454</span>        .entries().stream()<a name="line.454"></a>
-<span class="sourceLineNo">455</span>        .collect(Collectors.summingLong((e) -&gt;<a name="line.455"></a>
-<span class="sourceLineNo">456</span>            getSizeOfStoreFile(tn, regionName, e.getKey(), e.getValue())));<a name="line.456"></a>
-<span class="sourceLineNo">457</span>  }<a name="line.457"></a>
-<span class="sourceLineNo">458</span><a name="line.458"></a>
-<span class="sourceLineNo">459</span>  /**<a name="line.459"></a>
-<span class="sourceLineNo">460</span>   * Computes the size of the store file given its name, region and family name in<a name="line.460"></a>
-<span class="sourceLineNo">461</span>   * the archive directory.<a name="line.461"></a>
-<span class="sourceLineNo">462</span>   */<a name="line.462"></a>
-<span class="sourceLineNo">463</span>  long getSizeOfStoreFile(<a name="line.463"></a>
-<span class="sourceLineNo">464</span>      TableName tn, String regionName, String family, String storeFile) {<a name="line.464"></a>
-<span class="sourceLineNo">465</span>    Path familyArchivePath;<a name="line.465"></a>
-<span class="sourceLineNo">466</span>    try {<a name="line.466"></a>
-<span class="sourceLineNo">467</span>      familyArchivePath = HFileArchiveUtil.getStoreArchivePath(conf, tn, regionName, family);<a name="line.467"></a>
-<span class="sourceLineNo">468</span>    } catch (IOException e) {<a name="line.468"></a>
-<span class="sourceLineNo">469</span>      LOG.warn("Could not compute path for the archive directory for the region", e);<a name="line.469"></a>
-<span class="sourceLineNo">470</span>      return 0L;<a name="line.470"></a>
-<span class="sourceLineNo">471</span>    }<a name="line.471"></a>
-<span class="sourceLineNo">472</span>    Path fileArchivePath = new Path(familyArchivePath, storeFile);<a name="line.472"></a>
-<span class="sourceLineNo">473</span>    try {<a name="line.473"></a>
-<span class="sourceLineNo">474</span>      if (fs.exists(fileArchivePath)) {<a name="line.474"></a>
-<span class="sourceLineNo">475</span>        FileStatus[] status = fs.listStatus(fileArchivePath);<a name="line.475"></a>
-<span class="sourceLineNo">476</span>        if (1 != status.length) {<a name="line.476"></a>
-<span class="sourceLineNo">477</span>          LOG.warn("Expected " + fileArchivePath +<a name="line.477"></a>
-<span class="sourceLineNo">478</span>              " to be a file but was a directory, ignoring reference");<a name="line.478"></a>
-<span class="sourceLineNo">479</span>          return 0L;<a name="line.479"></a>
-<span class="sourceLineNo">480</span>        }<a name="line.480"></a>
-<span class="sourceLineNo">481</span>        return status[0].getLen();<a name="line.481"></a>
-<span class="sourceLineNo">482</span>      }<a name="line.482"></a>
-<span class="sourceLineNo">483</span>    } catch (IOException e) {<a name="line.483"></a>
-<span class="sourceLineNo">484</span>      LOG.warn("Could not obtain the status of " + fileArchivePath, e);<a name="line.484"></a>
-<span class="sourceLineNo">485</span>      return 0L;<a name="line.485"></a>
-<span class="sourceLineNo">486</span>    }<a name="line.486"></a>
-<span class="sourceLineNo">487</span>    LOG.warn("Expected " + fileArchivePath + " to exist but does not, ignoring reference.");<a name="line.487"></a>
-<span class="sourceLineNo">488</span>    return 0L;<a name="line.488"></a>
-<span class="sourceLineNo">489</span>  }<a name="line.489"></a>
-<span class="sourceLineNo">490</span><a name="line.490"></a>
-<span class="sourceLineNo">491</span>  /**<a name="line.491"></a>
-<span class="sourceLineNo">492</span>   * Extracts the names of the store files referenced by this snapshot which satisfy the given<a name="line.492"></a>
-<span class="sourceLineNo">493</span>   * predicate (the predicate returns {@code true}).<a name="line.493"></a>
-<span class="sourceLineNo">494</span>   */<a name="line.494"></a>
-<span class="sourceLineNo">495</span>  Set&lt;StoreFileReference&gt; getStoreFilesFromSnapshot(<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      SnapshotManifest manifest, Predicate&lt;String&gt; filter) {<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    Set&lt;StoreFileReference&gt; references = new HashSet&lt;&gt;();<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    // For each region referenced by the snapshot<a name="line.498"></a>
-<span class="sourceLineNo">499</span>    for (SnapshotRegionManifest rm : manifest.getRegionManifests()) {<a name="line.499"></a>
-<span class="sourceLineNo">500</span>      StoreFileReference regionReference = new StoreFileReference(<a name="line.500"></a>
-<span class="sourceLineNo">501</span>          ProtobufUtil.toRegionInfo(rm.getRegionInfo()).getEncodedName());<a name="line.501"></a>
-<span class="sourceLineNo">502</span><a name="line.502"></a>
-<span class="sourceLineNo">503</span>      // For each column family in this region<a name="line.503"></a>
-<span class="sourceLineNo">504</span>      for (FamilyFiles ff : rm.getFamilyFilesList()) {<a name="line.504"></a>
-<span class="sourceLineNo">505</span>        final String familyName = ff.getFamilyName().toStringUtf8();<a name="line.505"></a>
-<span class="sourceLineNo">506</span>        // And each store file in that family<a name="line.506"></a>
-<span class="sourceLineNo">507</span>        for (StoreFile sf : ff.getStoreFilesList()) {<a name="line.507"></a>
-<span class="sourceLineNo">508</span>          String storeFileName = sf.getName();<a name="line.508"></a>
-<span class="sourceLineNo">509</span>          // A snapshot only "inherits" a files size if it uniquely refers to it (no table<a name="line.509"></a>
-<span class="sourceLineNo">510</span>          // and no other snapshot references it).<a name="line.510"></a>
-<span class="sourceLineNo">511</span>          if (filter.test(storeFileName)) {<a name="line.511"></a>
-<span class="sourceLineNo">512</span>            regionReference.addFamilyStoreFile(familyName, storeFileName);<a name="line.512"></a>
-<span class="sourceLineNo">513</span>          }<a name="line.513"></a>
-<span class="sourceLineNo">514</span>        }<a name="line.514"></a>
-<span class="sourceLineNo">515</span>      }<a name="line.515"></a>
-<span class="sourceLineNo">516</span>      // Only add this Region reference if we retained any files.<a name="line.516"></a>
-<span class="sourceLineNo">517</span>      if (!regionReference.getFamilyToFilesMapping().isEmpty()) {<a name="line.517"></a>
-<span class="sourceLineNo">518</span>        references.add(regionReference);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>      }<a name="line.519"></a>
-<span class="sourceLineNo">520</span>    }<a name="line.520"></a>
-<span class="sourceLineNo">521</span>    return references;<a name="line.521"></a>
-<span class="sourceLineNo">522</span>  }<a name="line.522"></a>
-<span class="sourceLineNo">523</span><a name="line.523"></a>
-<span class="sourceLineNo">524</span>  /**<a name="line.524"></a>
-<span class="sourceLineNo">525</span>   * Writes the snapshot sizes to the provided {@code table}.<a name="line.525"></a>
-<span class="sourceLineNo">526</span>   */<a name="line.526"></a>
-<span class="sourceLineNo">527</span>  void persistSnapshotSizes(<a name="line.527"></a>
-<span class="sourceLineNo">528</span>      Table table, List&lt;SnapshotWithSize&gt; snapshotSizes) throws IOException {<a name="line.528"></a>
-<span class="sourceLineNo">529</span>    // Convert each entry in the map to a Put and write them to the quota table<a name="line.529"></a>
-<span class="sourceLineNo">530</span>    table.put(snapshotSizes<a name="line.530"></a>
-<span class="sourceLineNo">531</span>        .stream()<a name="line.531"></a>
-<span class="sourceLineNo">532</span>        .map(sws -&gt; QuotaTableUtil.createPutForSnapshotSize(<a name="line.532"></a>
-<span class="sourceLineNo">533</span>            tn, sws.getName(), sws.getSize()))<a name="line.533"></a>
-<span class="sourceLineNo">534</span>        .collect(Collectors.toList()));<a name="line.534"></a>
-<span class="sourceLineNo">535</span>  }<a name="line.535"></a>
-<span class="sourceLineNo">536</span><a name="line.536"></a>
-<span class="sourceLineNo">537</span>  /**<a name="line.537"></a>
-<span class="sourceLineNo">538</span>   * A struct encapsulating the name of a snapshot and its "size" on the filesystem. This size is<a name="line.538"></a>
-<span class="sourceLineNo">539</span>   * defined as the amount of filesystem space taken by the files the snapshot refers to which<a name="line.539"></a>
-<span class="sourceLineNo">540</span>   * the originating table no longer refers to.<a name="line.540"></a>
-<span class="sourceLineNo">541</span>   */<a name="line.541"></a>
-<span class="sourceLineNo">542</span>  static class SnapshotWithSize {<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    private final String name;<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    private final long size;<a name="line.544"></a>
-<span class="sourceLineNo">545</span><a name="line.545"></a>
-<span class="sourceLineNo">546</span>    SnapshotWithSize(String name, long size) {<a name="line.546"></a>
-<span class="sourceLineNo">547</span>      this.name = Objects.requireNonNull(name);<a name="line.547"></a>
-<span class="sourceLineNo">548</span>      this.size = size;<a name="line.548"></a>
-<span class="sourceLineNo">549</span>    }<a name="line.549"></a>
-<span class="sourceLineNo">550</span><a name="line.550"></a>
-<span class="sourceLineNo">551</span>    String getName() {<a name="line.551"></a>
-<span class="sourceLineNo">552</span>      return name;<a name="line.552"></a>
-<span class="sourceLineNo">553</span>    }<a name="line.553"></a>
-<span class="sourceLineNo">554</span><a name="line.554"></a>
-<span class="sourceLineNo">555</span>    long getSize() {<a name="line.555"></a>
-<span class="sourceLineNo">556</span>      return size;<a name="line.556"></a>
-<span class="sourceLineNo">557</span>    }<a name="line.557"></a>
-<span class="sourceLineNo">558</span><a name="line.558"></a>
-<span class="sourceLineNo">559</span>    @Override<a name="line.559"></a>
-<span class="sourceLineNo">560</span>    public int hashCode() {<a name="line.560"></a>
-<span class="sourceLineNo">561</span>      return new HashCodeBuilder().append(name).append(size).toHashCode();<a name="line.561"></a>
-<span class="sourceLineNo">562</span>    }<a name="line.562"></a>
-<span class="sourceLineNo">563</span><a name="line.563"></a>
-<span class="sourceLineNo">564</span>    @Override<a name="line.564"></a>
-<span class="sourceLineNo">565</span>    public boolean equals(Object o) {<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      if (this == o) {<a name="line.566"></a>
-<span class="sourceLineNo">567</span>        return true;<a name="line.567"></a>
-<span class="sourceLineNo">568</span>      }<a name="line.568"></a>
-<span class="sourceLineNo">569</span><a name="line.569"></a>
-<span class="sourceLineNo">570</span>      if (!(o instanceof SnapshotWithSize)) {<a name="line.570"></a>
-<span class="sourceLineNo">571</span>        return false;<a name="line.571"></a>
-<span class="sourceLineNo">572</span>      }<a name="line.572"></a>
-<span class="sourceLineNo">573</span><a name="line.573"></a>
-<span class="sourceLineNo">574</span>      SnapshotWithSize other = (SnapshotWithSize) o;<a name="line.574"></a>
-<span class="sourceLineNo">575</span>      return name.equals(other.name) &amp;&amp; size == other.size;<a name="line.575"></a>
-<span class="sourceLineNo">576</span>    }<a name="line.576"></a>
-<span class="sourceLineNo">577</span><a name="line.577"></a>
-<span class="sourceLineNo">578</span>    @Override<a name="line.578"></a>
-<span class="sourceLineNo">579</span>    public String toString() {<a name="line.579"></a>
-<span class="sourceLineNo">580</span>      StringBuilder sb = new StringBuilder(32);<a name="line.580"></a>
-<span class="sourceLineNo">581</span>      return sb.append("SnapshotWithSize:[").append(name).append(" ")<a name="line.581"></a>
-<span class="sourceLineNo">582</span>          .append(StringUtils.byteDesc(size)).append("]").toString();<a name="line.582"></a>
-<span class="sourceLineNo">583</span>    }<a name="line.583"></a>
-<span class="sourceLineNo">584</span>  }<a name="line.584"></a>
-<span class="sourceLineNo">585</span><a name="line.585"></a>
-<span class="sourceLineNo">586</span>  /**<a name="line.586"></a>
-<span class="sourceLineNo">587</span>   * A reference to a collection of files in the archive directory for a single region.<a name="line.587"></a>
-<span class="sourceLineNo">588</span>   */<a name="line.588"></a>
-<span class="sourceLineNo">589</span>  static class StoreFileReference {<a name="line.589"></a>
-<span class="sourceLineNo">590</span>    private final String regionName;<a name="line.590"></a>
-<span class="sourceLineNo">591</span>    private final Multimap&lt;String,String&gt; familyToFiles;<a name="line.591"></a>
-<span class="sourceLineNo">592</span><a name="line.592"></a>
-<span class="sourceLineNo">593</span>    StoreFileReference(String regionName) {<a name="line.593"></a>
-<span class="sourceLineNo">594</span>      this.regionName = Objects.requireNonNull(regionName);<a name="line.594"></a>
-<span class="sourceLineNo">595</span>      familyToFiles = HashMultimap.create();<a name="line.595"></a>
-<span class="sourceLineNo">596</span>    }<a name="line.596"></a>
-<span class="sourceLineNo">597</span><a name="line.597"></a>
-<span class="sourceLineNo">598</span>    String getRegionName() {<a name="line.598"></a>
-<span class="sourceLineNo">599</span>      return regionName;<a name="line.599"></a>
-<span class="sourceLineNo">600</span>    }<a name="line.600"></a>
-<span class="sourceLineNo">601</span><a name="line.601"></a>
-<span class="sourceLineNo">602</span>    Multimap&lt;String,String&gt; getFamilyToFilesMapping() {<a name="line.602"></a>
-<span class="sourceLineNo">603</span>      return familyToFiles;<a name="line.603"></a>
-<span class="sourceLineNo">604</span>    }<a name="line.604"></a>
-<span class="sourceLineNo">605</span><a name="line.605"></a>
-<span class="sourceLineNo">606</span>    void addFamilyStoreFile(String family, String storeFileName) {<a name="line.606"></a>
-<span class="sourceLineNo">607</span>      familyToFiles.put(family, storeFileName);<a name="line.607"></a>
-<span class="sourceLineNo">608</span>    }<a name="line.608"></a>
-<span class="sourceLineNo">609</span><a name="line.609"></a>
-<span class="sourceLineNo">610</span>    @Override<a name="line.610"></a>
-<span class="sourceLineNo">611</span>    public int hashCode() {<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      return new HashCodeBuilder().append(regionName).append(familyToFiles).toHashCode();<a name="line.612"></a>
-<span class="sourceLineNo">613</span>    }<a name="line.613"></a>
-<span class="sourceLineNo">614</span><a name="line.614"></a>
-<span class="sourceLineNo">615</span>    @Override<a name="line.615"></a>
-<span class="sourceLineNo">616</span>    public boolean equals(Object o) {<a name="line.616"></a>
-<span class="sourceLineNo">617</span>      if (this == o) {<a name="line.617"></a>
-<span class="sourceLineNo">618</span>        return true;<a name="line.618"></a>
-<span class="sourceLineNo">619</span>      }<a name="line.619"></a>
-<span class="sourceLineNo">620</span>      if (!(o instanceof StoreFileReference)) {<a name="line.620"></a>
-<span class="sourceLineNo">621</span>        return false;<a name="line.621"></a>
-<span class="sourceLineNo">622</span>      }<a name="line.622"></a>
-<span class="sourceLineNo">623</span>      StoreFileReference other = (StoreFileReference) o;<a name="line.623"></a>
-<span class="sourceLineNo">624</span>      return regionName.equals(other.regionName) &amp;&amp; familyToFiles.equals(other.familyToFiles);<a name="line.624"></a>
-<span class="sourceLineNo">625</span>    }<a name="line.625"></a>
-<span class="sourceLineNo">626</span><a name="line.626"></a>
-<span class="sourceLineNo">627</span>    @Override<a name="line.627"></a>
-<span class="sourceLineNo">628</span>    public String toString() {<a name="line.628"></a>
-<span class="sourceLineNo">629</span>      StringBuilder sb = new StringBuilder();<a name="line.629"></a>
-<span class="sourceLineNo">630</span>      return sb.append("StoreFileReference[region=").append(regionName).append(", files=")<a name="line.630"></a>
-<span class="sourceLineNo">631</span>          .append(familyToFiles).append("]").toString();<a name="line.631"></a>
-<span class="sourceLineNo">632</span>    }<a name="line.632"></a>
-<span class="sourceLineNo">633</span>  }<a name="line.633"></a>
-<span class="sourceLineNo">634</span>}<a name="line.634"></a>
+<span class="sourceLineNo">037</span>import org.apache.commons.lang3.builder.HashCodeBuilder;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.conf.Configuration;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.hadoop.fs.FileStatus;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.apache.hadoop.fs.FileSystem;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.fs.Path;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.TableName;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.client.Connection;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.client.Get;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.client.Put;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.client.Result;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.client.Table;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.util.HFileArchiveUtil;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.util.StringUtils;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.53"></a>
+<span class="sourceLineNo">054</span><a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.slf4j.Logger;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.slf4j.LoggerFactory;<a name="line.56"></a>
+<span class="sourceLineNo">057</span><a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hbase.thirdparty.com.google.common.collect.HashMultimap;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;<a name="line.60"></a>
+<span class="sourceLineNo">061</span><a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile;<a name="line.66"></a>
+<span class="sourceLineNo">067</span><a name="line.67"></a>
+<span class="sourceLineNo">068</span>/**<a name="line.68"></a>
+<span class="sourceLineNo">069</span> * Tracks file archiving and updates the hbase quota table.<a name="line.69"></a>
+<span class="sourceLineNo">070</span> */<a name="line.70"></a>
+<span class="sourceLineNo">071</span>@InterfaceAudience.Private<a name="line.71"></a>
+<span class="sourceLineNo">072</span>public class FileArchiverNotifierImpl implements FileArchiverNotifier {<a name="line.72"></a>
+<span class="sourceLineNo">073</span>  private static final Logger LOG = LoggerFactory.getLogger(FileArchiverNotifierImpl.class);<a name="line.73"></a>
+<span class="sourceLineNo">074</span>  private final Connection conn;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>  private final Configuration conf;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>  private final FileSystem fs;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>  private final TableName tn;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>  private final ReadLock readLock;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>  private final WriteLock writeLock;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>  private volatile long lastFullCompute = Long.MIN_VALUE;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>  private List&lt;String&gt; currentSnapshots = Collections.emptyList();<a name="line.81"></a>
+<span class="sourceLineNo">082</span>  private static final Map&lt;String,Object&gt; NAMESPACE_LOCKS = new HashMap&lt;&gt;();<a name="line.82"></a>
+<span class="sourceLineNo">083</span><a name="line.83"></a>
+<span class="sourceLineNo">084</span>  /**<a name="line.84"></a>
+<span class="sourceLineNo">085</span>   * An Exception thrown when SnapshotSize updates to hbase:quota fail to be written.<a name="line.85"></a>
+<span class="sourceLineNo">086</span>   */<a name="line.86"></a>
+<span class="sourceLineNo">087</span>  @InterfaceAudience.Private<a name="line.87"></a>
+<span class="sourceLineNo">088</span>  public static class QuotaSnapshotSizeSerializationException extends IOException {<a name="line.88"></a>
+<span class="sourceLineNo">089</span>    private static final long serialVersionUID = 1L;<a name="line.89"></a>
+<span class="sourceLineNo">090</span><a name="line.90"></a>
+<span class="sourceLineNo">091</span>    public QuotaSnapshotSizeSerializationException(String msg) {<a name="line.91"></a>
+<span class="sourceLineNo">092</span>      super(msg);<a name="line.92"></a>
+<span class="sourceLineNo">093</span>    }<a name="line.93"></a>
+<span class="sourceLineNo">094</span>  }<a name="line.94"></a>
+<span class="sourceLineNo">095</span><a name="line.95"></a>
+<span class="sourceLineNo">096</span>  public FileArchiverNotifierImpl(<a name="line.96"></a>
+<span class="sourceLineNo">097</span>      Connection conn, Configuration conf, FileSystem fs, TableName tn) {<a name="line.97"></a>
+<span class="sourceLineNo">098</span>    this.conn = conn;<a name="line.98"></a>
+<span class="sourceLineNo">099</span>    this.conf = conf;<a name="line.99"></a>
+<span class="sourceLineNo">100</span>    this.fs = fs;<a name="line.100"></a>
+<span class="sourceLineNo">101</span>    this.tn = tn;<a name="line.101"></a>
+<span class="sourceLineNo">102</span>    ReentrantReadWriteLock lock = new ReentrantReadWriteLock();<a name="line.102"></a>
+<span class="sourceLineNo">103</span>    readLock = lock.readLock();<a name="line.103"></a>
+<span class="sourceLineNo">104</span>    writeLock = lock.writeLock();<a name="line.104"></a>
+<span class="sourceLineNo">105</span>  }<a name="line.105"></a>
+<span class="sourceLineNo">106</span><a name="line.106"></a>
+<span class="sourceLineNo">107</span>  static synchronized Object getLockForNamespace(String namespace) {<a name="line.107"></a>
+<span class="sourceLineNo">108</span>    return NAMESPACE_LOCKS.computeIfAbsent(namespace, (ns) -&gt; new Object());<a name="line.108"></a>
+<span class="sourceLineNo">109</span>  }<a name="line.109"></a>
+<span class="sourceLineNo">110</span><a name="line.110"></a>
+<span class="sourceLineNo">111</span>  /**<a name="line.111"></a>
+<span class="sourceLineNo">112</span>   * Returns a strictly-increasing measure of time extracted by {@link System#nanoTime()}.<a name="line.112"></a>
+<span class="sourceLineNo">113</span>   */<a name="line.113"></a>
+<span class="sourceLineNo">114</span>  long getLastFullCompute() {<a name="line.114"></a>
+<span class="sourceLineNo">115</span>    return lastFullCompute;<a name="line.115"></a>
+<span class="sourceLineNo">116</span>  }<a name="line.116"></a>
+<span class="sourceLineNo">117</span><a name="line.117"></a>
+<span class="sourceLineNo">118</span>  @Override<a name="line.118"></a>
+<span class="sourceLineNo">119</span>  public void addArchivedFiles(Set&lt;Entry&lt;String, Long&gt;&gt; fileSizes) throws IOException {<a name="line.119"></a>
+<span class="sourceLineNo">120</span>    long start = System.nanoTime();<a name="line.120"></a>
+<span class="sourceLineNo">121</span>    readLock.lock();<a name="line.121"></a>
+<span class="sourceLineNo">122</span>    try {<a name="line.122"></a>
+<span class="sourceLineNo">123</span>      // We want to catch the case where we got an archival request, but there was a full<a name="line.123"></a>
+<span class="sourceLineNo">124</span>      // re-computation in progress that was blocking us. Most likely, the full computation is going<a name="line.124"></a>
+<span class="sourceLineNo">125</span>      // to already include the changes we were going to make.<a name="line.125"></a>
+<span class="sourceLineNo">126</span>      //<a name="line.126"></a>
+<span class="sourceLineNo">127</span>      // Same as "start &lt; lastFullCompute" but avoiding numeric overflow per the<a name="line.127"></a>
+<span class="sourceLineNo">128</span>      // System.nanoTime() javadoc<a name="line.128"></a>
+<span class="sourceLineNo">129</span>      if (lastFullCompute != Long.MIN_VALUE &amp;&amp; start - lastFullCompute &lt; 0) {<a name="line.129"></a>
+<span class="sourceLineNo">130</span>        if (LOG.isTraceEnabled()) {<a name="line.130"></a>
+<span class="sourceLineNo">131</span>          LOG.trace("A full computation was performed after this request was received."<a name="line.131"></a>
+<span class="sourceLineNo">132</span>              + " Ignoring requested updates: " + fileSizes);<a name="line.132"></a>
+<span class="sourceLineNo">133</span>        }<a name="line.133"></a>
+<span class="sourceLineNo">134</span>        return;<a name="line.134"></a>
+<span class="sourceLineNo">135</span>      }<a name="line.135"></a>
+<span class="sourceLineNo">136</span><a name="line.136"></a>
+<span class="sourceLineNo">137</span>      if (LOG.isTraceEnabled()) {<a name="line.137"></a>
+<span class="sourceLineNo">138</span>        LOG.trace("currentSnapshots: " + currentSnapshots + " fileSize: "+ fileSizes);<a name="line.138"></a>
+<span class="sourceLineNo">139</span>      }<a name="line.139"></a>
+<span class="sourceLineNo">140</span><a name="line.140"></a>
+<span class="sourceLineNo">141</span>      // Write increment to quota table for the correct snapshot. Only do this if we have snapshots<a name="line.141"></a>
+<span class="sourceLineNo">142</span>      // and some files that were archived.<a name="line.142"></a>
+<span class="sourceLineNo">143</span>      if (!currentSnapshots.isEmpty() &amp;&amp; !fileSizes.isEmpty()) {<a name="line.143"></a>
+<span class="sourceLineNo">144</span>        // We get back the files which no snapshot referenced (the files which will be deleted soon)<a name="line.144"></a>
+<span class="sourceLineNo">145</span>        groupArchivedFiledBySnapshotAndRecordSize(currentSnapshots, fileSizes);<a name="line.145"></a>
+<span class="sourceLineNo">146</span>      }<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    } finally {<a name="line.147"></a>
+<span class="sourceLineNo">148</span>      readLock.unlock();<a name="line.148"></a>
+<span class="sourceLineNo">149</span>    }<a name="line.149"></a>
+<span class="sourceLineNo">150</span>  }<a name="line.150"></a>
+<span class="sourceLineNo">151</span><a name="line.151"></a>
+<span class="sourceLineNo">152</span>  /**<a name="line.152"></a>
+<span class="sourceLineNo">153</span>   * For each file in the map, this updates the first snapshot (lexicographic snapshot name) that<a name="line.153"></a>
+<span class="sourceLineNo">154</span>   * references this file. The result of this computation is serialized to the quota table.<a name="line.154"></a>
+<span class="sourceLineNo">155</span>   *<a name="line.155"></a>
+<span class="sourceLineNo">156</span>   * @param snapshots A collection of HBase snapshots to group the files into<a name="line.156"></a>
+<span class="sourceLineNo">157</span>   * @param fileSizes A map of file names to their sizes<a name="line.157"></a>
+<span class="sourceLineNo">158</span>   */<a name="line.158"></a>
+<span class="sourceLineNo">159</span>  void groupArchivedFiledBySnapshotAndRecordSize(<a name="line.159"></a>
+<span class="sourceLineNo">160</span>      List&lt;String&gt; snapshots, Set&lt;Entry&lt;String, Long&gt;&gt; fileSizes) throws IOException {<a name="line.160"></a>
+<span class="sourceLineNo">161</span>    // Make a copy as we'll modify it.<a name="line.161"></a>
+<span class="sourceLineNo">162</span>    final Map&lt;String,Long&gt; filesToUpdate = new HashMap&lt;&gt;(fileSizes.size());<a name="line.162"></a>
+<span class="sourceLineNo">163</span>    for (Entry&lt;String,Long&gt; entry : fileSizes) {<a name="line.163"></a>
+<span class="sourceLineNo">164</span>      filesToUpdate.put(entry.getKey(), entry.getValue());<a name="line.164"></a>
+<span class="sourceLineNo">165</span>    }<a name="line.165"></a>
+<span class="sourceLineNo">166</span>    // Track the change in size to each snapshot<a name="line.166"></a>
+<span class="sourceLineNo">167</span>    final Map&lt;String,Long&gt; snapshotSizeChanges = new HashMap&lt;&gt;();<a name="line.167"></a>
+<span class="sourceLineNo">168</span>    for (String snapshot : snapshots) {<a name="line.168"></a>
+<span class="sourceLineNo">169</span>      // For each file in `filesToUpdate`, check if `snapshot` refers to it.<a name="line.169"></a>
+<span class="sourceLineNo">170</span>      // If `snapshot` does, remove it from `filesToUpdate` and add it to `snapshotSizeChanges`.<a name="line.170"></a>
+<span class="sourceLineNo">171</span>      bucketFilesToSnapshot(snapshot, filesToUpdate, snapshotSizeChanges);<a name="line.171"></a>
+<span class="sourceLineNo">172</span>      if (filesToUpdate.isEmpty()) {<a name="line.172"></a>
+<span class="sourceLineNo">173</span>        // If we have no more files recently archived, we have nothing more to check<a name="line.173"></a>
+<span class="sourceLineNo">174</span>        break;<a name="line.174"></a>
+<span class="sourceLineNo">175</span>      }<a name="line.175"></a>
+<span class="sourceLineNo">176</span>    }<a name="line.176"></a>
+<span class="sourceLineNo">177</span>    // We have computed changes to the snapshot size, we need to record them.<a name="line.177"></a>
+<span class="sourceLineNo">178</span>    if (!snapshotSizeChanges.isEmpty()) {<a name="line.178"></a>
+<span class="sourceLineNo">179</span>      if (LOG.isTraceEnabled()) {<a name="line.179"></a>
+<span class="sourceLineNo">180</span>        LOG.trace("Writing snapshot size changes for: " + snapshotSizeChanges);<a name="line.180"></a>
+<span class="sourceLineNo">181</span>      }<a name="line.181"></a>
+<span class="sourceLineNo">182</span>      persistSnapshotSizeChanges(snapshotSizeChanges);<a name="line.182"></a>
+<span class="sourceLineNo">183</span>    }<a name="line.183"></a>
+<span class="sourceLineNo">184</span>  }<a name="line.184"></a>
+<span class="sourceLineNo">185</span><a name="line.185"></a>
+<span class="sourceLineNo">186</span>  /**<a name="line.186"></a>
+<span class="sourceLineNo">187</span>   * For the given snapshot, find all files which this {@code snapshotName} references. After a file<a name="line.187"></a>
+<span class="sourceLineNo">188</span>   * is found to be referenced by the snapshot, it is removed from {@code filesToUpdate} and<a name="line.188"></a>
+<span class="sourceLineNo">189</span>   * {@code snapshotSizeChanges} is updated in concert.<a name="line.189"></a>
+<span class="sourceLineNo">190</span>   *<a name="line.190"></a>
+<span class="sourceLineNo">191</span>   * @param snapshotName The snapshot to check<a name="line.191"></a>
+<span class="sourceLineNo">192</span>   * @param filesToUpdate A mapping of archived files to their size<a name="line.192"></a>
+<span class="sourceLineNo">193</span>   * @param snapshotSizeChanges A mapping of snapshots and their change in size<a name="line.193"></a>
+<span class="sourceLineNo">194</span>   */<a name="line.194"></a>
+<span class="sourceLineNo">195</span>  void bucketFilesToSnapshot(<a name="line.195"></a>
+<span class="sourceLineNo">196</span>      String snapshotName, Map&lt;String,Long&gt; filesToUpdate, Map&lt;String,Long&gt; snapshotSizeChanges)<a name="line.196"></a>
+<span class="sourceLineNo">197</span>          throws IOException {<a name="line.197"></a>
+<span class="sourceLineNo">198</span>    // A quick check to avoid doing work if the caller unnecessarily invoked this method.<a name="line.198"></a>
+<span class="sourceLineNo">199</span>    if (filesToUpdate.isEmpty()) {<a name="line.199"></a>
+<span class="sourceLineNo">200</span>      return;<a name="line.200"></a>
+<span class="sourceLineNo">201</span>    }<a name="line.201"></a>
+<span class="sourceLineNo">202</span><a name="line.202"></a>
+<span class="sourceLineNo">203</span>    Path snapshotDir = SnapshotDescriptionUtils.getCompletedSnapshotDir(<a name="line.203"></a>
+<span class="sourceLineNo">204</span>        snapshotName, FSUtils.getRootDir(conf));<a name="line.204"></a>
+<span class="sourceLineNo">205</span>    SnapshotDescription sd = SnapshotDescriptionUtils.readSnapshotInfo(fs, snapshotDir);<a name="line.205"></a>
+<span class="sourceLineNo">206</span>    SnapshotManifest manifest = SnapshotManifest.open(conf, fs, snapshotDir, sd);<a name="line.206"></a>
+<span class="sourceLineNo">207</span>    // For each region referenced by the snapshot<a name="line.207"></a>
+<span class="sourceLineNo">208</span>    for (SnapshotRegionManifest rm : manifest.getRegionManifests()) {<a name="line.208"></a>
+<span class="sourceLineNo">209</span>      // For each column family in this region<a name="line.209"></a>
+<span class="sourceLineNo">210</span>      for (FamilyFiles ff : rm.getFamilyFilesList()) {<a name="line.210"></a>
+<span class="sourceLineNo">211</span>        // And each store file in that family<a name="line.211"></a>
+<span class="sourceLineNo">212</span>        for (StoreFile sf : ff.getStoreFilesList()) {<a name="line.212"></a>
+<span class="sourceLineNo">213</span>          Long valueOrNull = filesToUpdate.remove(sf.getName());<a name="line.213"></a>
+<span class="sourceLineNo">214</span>          if (valueOrNull != null) {<a name="line.214"></a>
+<span class="sourceLineNo">215</span>            // This storefile was recently archived, we should update this snapshot with its size<a name="line.215"></a>
+<span class="sourceLineNo">216</span>            snapshotSizeChanges.merge(snapshotName, valueOrNull, Long::sum);<a name="line.216"></a>
+<span class="sourceLineNo">217</span>          }<a name="line.217"></a>
+<span class="sourceLineNo">218</span>          // Short-circuit, if we have no more files that were archived, we don't need to iterate<a name="line.218"></a>
+<span class="sourceLineNo">219</span>          // over the rest of the snapshot.<a name="line.219"></a>
+<span class="sourceLineNo">220</span>          if (filesToUpdate.isEmpty()) {<a name="line.220"></a>
+<span class="sourceLineNo">221</span>            return;<a name="line.221"></a>
+<span class="sourceLineNo">222</span>          }<a name="line.222"></a>
+<span class="sourceLineNo">223</span>        }<a name="line.223"></a>
+<span class="sourceLineNo">224</span>      }<a name="line.224"></a>
+<span class="sourceLineNo">225</span>    }<a name="line.225"></a>
+<span class="sourceLineNo">226</span>  }<a name="line.226"></a>
+<span class="sourceLineNo">227</span><a name="line.227"></a>
+<span class="sourceLineNo">228</span>  /**<a name="line.228"></a>
+<span class="sourceLineNo">229</span>   * Reads the current size for each snapshot to update, generates a new update based on that value,<a name="line.229"></a>
+<span class="sourceLineNo">230</span>   * and then writes the new update.<a name="line.230"></a>
+<span class="sourceLineNo">231</span>   *<a name="line.231"></a>
+<span class="sourceLineNo">232</span>   * @param snapshotSizeChanges A map of snapshot name to size change<a name="line.232"></a>
+<span class="sourceLineNo">233</span>   */<a name="line.233"></a>
+<span class="sourceLineNo">234</span>  void persistSnapshotSizeChanges(Map&lt;String,Long&gt; snapshotSizeChanges) throws IOException {<a name="line.234"></a>
+<span class="sourceLineNo">235</span>    try (Table quotaTable = conn.getTable(QuotaTableUtil.QUOTA_TABLE_NAME)) {<a name="line.235"></a>
+<span class="sourceLineNo">236</span>      // Create a list (with a more typical ordering implied)<a name="line.236"></a>
+<span class="sourceLineNo">237</span>      final List&lt;Entry&lt;String,Long&gt;&gt; snapshotSizeEntries = new ArrayList&lt;&gt;(<a name="line.237"></a>
+<span class="sourceLineNo">238</span>          snapshotSizeChanges.entrySet());<a name="line.238"></a>
+<span class="sourceLineNo">239</span>      // Create the Gets for each snapshot we need to update<a name="line.239"></a>
+<span class="sourceLineNo">240</span>      final List&lt;Get&gt; snapshotSizeGets = snapshotSizeEntries.stream()<a name="line.240"></a>
+<span class="sourceLineNo">241</span>          .map((e) -&gt; QuotaTableUtil.makeGetForSnapshotSize(tn, e.getKey()))<a name="line.241"></a>
+<span class="sourceLineNo">242</span>          .collect(Collectors.toList());<a name="line.242"></a>
+<span class="sourceLineNo">243</span>      final Iterator&lt;Entry&lt;String,Long&gt;&gt; iterator = snapshotSizeEntries.iterator();<a name="line.243"></a>
+<span class="sourceLineNo">244</span>      // A List to store each Put we'll create from the Get's we retrieve<a name="line.244"></a>
+<span class="sourceLineNo">245</span>      final List&lt;Put&gt; updates = new ArrayList&lt;&gt;(snapshotSizeEntries.size());<a name="line.245"></a>
+<span class="sourceLineNo">246</span><a name="line.246"></a>
+<span class="sourceLineNo">247</span>      // TODO Push this down to the RegionServer with a coprocessor:<a name="line.247"></a>
+<span class="sourceLineNo">248</span>      //<a name="line.248"></a>
+<span class="sourceLineNo">249</span>      // We would really like to piggy-back on the row-lock already being grabbed<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      // to handle the update of the row in the quota table. However, because the value<a name="line.250"></a>
+<span class="sourceLineNo">251</span>      // is a serialized protobuf, the standard Increment API doesn't work for us. With a CP, we<a name="line.251"></a>
+<span class="sourceLineNo">252</span>      // can just send the size deltas to the RS and atomically update the serialized PB object<a name="line.252"></a>
+<span class="sourceLineNo">253</span>      // while relying on the row-lock for synchronization.<a name="line.253"></a>
+<span class="sourceLineNo">254</span>      //<a name="line.254"></a>
+<span class="sourceLineNo">255</span>      // Synchronizing on the namespace string is a "minor smell" but passable as this is<a name="line.255"></a>
+<span class="sourceLineNo">256</span>      // only invoked via a single caller (the active Master). Using the namespace name lets us<a name="line.256"></a>
+<span class="sourceLineNo">257</span>      // have some parallelism without worry of on caller seeing stale data from the quota table.<a name="line.257"></a>
+<span class="sourceLineNo">258</span>      synchronized (getLockForNamespace(tn.getNamespaceAsString())) {<a name="line.258"></a>
+<span class="sourceLineNo">259</span>        final Result[] existingSnapshotSizes = quotaTable.get(snapshotSizeGets);<a name="line.259"></a>
+<span class="sourceLineNo">260</span>        long totalSizeChange = 0;<a name="line.260"></a>
+<span class="sourceLineNo">261</span>        // Read the current size values (if they exist) to generate the new value<a name="line.261"></a>
+<span class="sourceLineNo">262</span>        for (Result result : existingSnapshotSizes) {<a name="line.262"></a>
+<span class="sourceLineNo">263</span>          Entry&lt;String,Long&gt; entry = iterator.next();<a name="line.263"></a>
+<span class="sourceLineNo">264</span>          String snapshot = entry.getKey();<a name="line.264"></a>
+<span class="sourceLineNo">265</span>          Long size = entry.getValue();<a name="line.265"></a>
+<span class="sourceLineNo">266</span>          // Track the total size change for the namespace this table belongs in<a name="line.266"></a>
+<span class="sourceLineNo">267</span>          totalSizeChange += size;<a name="line.267"></a>
+<span class="sourceLineNo">268</span>          // Get the size of the previous value (or zero)<a name="line.268"></a>
+<span class="sourceLineNo">269</span>          long previousSize = getSnapshotSizeFromResult(result);<a name="line.269"></a>
+<span class="sourceLineNo">270</span>          // Create an update. A file was archived from the table, so the table's size goes<a name="line.270"></a>
+<span class="sourceLineNo">271</span>          // down, but the snapshot's size goes up.<a name="line.271"></a>
+<span class="sourceLineNo">272</span>          updates.add(QuotaTableUtil.createPutForSnapshotSize(tn, snapshot, previousSize + size));<a name="line.272"></a>
+<span class="sourceLineNo">273</span>        }<a name="line.273"></a>
+<span class="sourceLineNo">274</span><a name="line.274"></a>
+<span class="sourceLineNo">275</span>        // Create an update for the summation of all snapshots in the namespace<a name="line.275"></a>
+<span class="sourceLineNo">276</span>        if (totalSizeChange != 0) {<a name="line.276"></a>
+<span class="sourceLineNo">277</span>          long previousSize = getPreviousNamespaceSnapshotSize(<a name="line.277"></a>
+<span class="sourceLineNo">278</span>              quotaTable, tn.getNamespaceAsString());<a name="line.278"></a>
+<span class="sourceLineNo">279</span>          updates.add(QuotaTableUtil.createPutForNamespaceSnapshotSize(<a name="line.279"></a>
+<span class="sourceLineNo">280</span>              tn.getNamespaceAsString(), previousSize + totalSizeChange));<a name="line.280"></a>
+<span class="sourceLineNo">281</span>        }<a name="line.281"></a>
+<span class="sourceLineNo">282</span><a name="line.282"></a>
+<span class="sourceLineNo">283</span>        // Send all of the quota table updates in one batch.<a name="line.283"></a>
+<span class="sourceLineNo">284</span>        List&lt;Object&gt; failures = new ArrayList&lt;&gt;();<a name="line.284"></a>
+<span class="sourceLineNo">285</span>        final Object[] results = new Object[updates.size()];<a name="line.285"></a>
+<span class="sourceLineNo">286</span>        quotaTable.batch(updates, results);<a name="line.286"></a>
+<span class="sourceLineNo">287</span>        for (Object result : results) {<a name="line.287"></a>
+<span class="sourceLineNo">288</span>          // A null result is an error condition (all RPC attempts failed)<a name="line.288"></a>
+<span class="sourceLineNo">289</span>          if (!(result instanceof Result)) {<a name="line.289"></a>
+<span class="sourceLineNo">290</span>            failures.add(result);<a name="line.290"></a>
+<span class="sourceLineNo">291</span>          }<a name="line.291"></a>
+<span class="sourceLineNo">292</span>        }<a name="line.292"></a>
+<span class="sourceLineNo">293</span>        // Propagate a failure if any updates failed<a name="line.293"></a>
+<span class="sourceLineNo">294</span>        if (!failures.isEmpty()) {<a name="line.294"></a>
+<span class="sourceLineNo">295</span>       

<TRUNCATED>
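
The guard at the top of addArchivedFiles above leans on a subtlety of System.nanoTime()
that is easy to miss: two readings may only be compared by subtracting them, because the
counter is allowed to wrap around Long.MAX_VALUE. Below is a minimal, self-contained
sketch of the same pattern; the class and method names are invented for illustration and
are not HBase APIs.

    // Sketch only: overflow-safe comparison of System.nanoTime() readings,
    // mirroring the "start - lastFullCompute < 0" check above.
    public class NanoTimeGuardSketch {
      // Sentinel meaning "no full computation has happened yet".
      private volatile long lastFullCompute = Long.MIN_VALUE;

      /** Record that a full computation finished now. */
      void markFullCompute() {
        lastFullCompute = System.nanoTime();
      }

      /**
       * True if a full computation finished after {@code start}. For readings from
       * the same nanoTime() origin, "start - lastFullCompute < 0" means the same as
       * "start < lastFullCompute", but it stays correct when the counter overflows,
       * which a direct "<" comparison does not.
       */
      boolean supersededBy(long start) {
        return lastFullCompute != Long.MIN_VALUE && start - lastFullCompute < 0;
      }

      public static void main(String[] args) {
        NanoTimeGuardSketch guard = new NanoTimeGuardSketch();
        long start = System.nanoTime();
        guard.markFullCompute();
        System.out.println(guard.supersededBy(start)); // true: compute ran after start
      }
    }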

[08/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannersCloseCallBack.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannersCloseCallBack.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannersCloseCallBack.html
index 7d1dba6..11f9915 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannersCloseCallBack.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannersCloseCallBack.html
@@ -50,114 +50,114 @@
 <span class="sourceLineNo">042</span>import java.util.concurrent.atomic.AtomicBoolean;<a name="line.42"></a>
 <span class="sourceLineNo">043</span>import java.util.concurrent.atomic.AtomicLong;<a name="line.43"></a>
 <span class="sourceLineNo">044</span>import java.util.concurrent.atomic.LongAdder;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.commons.collections.CollectionUtils;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.commons.lang3.mutable.MutableObject;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.conf.Configuration;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.fs.Path;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.ByteBufferExtendedCell;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.CacheEvictionStats;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.CacheEvictionStatsBuilder;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.Cell;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.CellScannable;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.HBaseIOException;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.HConstants;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.MultiActionResultTooLarge;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.Server;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.ServerName;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.TableName;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.client.Append;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.client.ConnectionUtils;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.client.Get;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.client.Put;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.client.Result;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.client.Row;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.client.VersionInfoUtil;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.conf.ConfigurationObserver;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.exceptions.ScannerResetException;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler;<a name="line.92"></a>
-<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.ipc.HBaseRpcController;<a name="line.93"></a>
-<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.ipc.PriorityFunction;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.ipc.QosPriority;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.ipc.RpcCallContext;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.ipc.RpcCallback;<a name="line.97"></a>
-<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.98"></a>
-<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.ipc.RpcServerFactory;<a name="line.100"></a>
-<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.ipc.RpcServerInterface;<a name="line.101"></a>
-<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;<a name="line.102"></a>
-<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.ipc.ServerRpcController;<a name="line.103"></a>
-<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.log.HBaseMarkers;<a name="line.104"></a>
-<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.master.MasterRpcServices;<a name="line.105"></a>
-<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.net.Address;<a name="line.106"></a>
-<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.procedure2.RSProcedureCallable;<a name="line.107"></a>
-<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.quotas.ActivePolicyEnforcement;<a name="line.108"></a>
-<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.quotas.OperationQuota;<a name="line.109"></a>
-<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.quotas.QuotaUtil;<a name="line.110"></a>
-<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.quotas.RegionServerRpcQuotaManager;<a name="line.111"></a>
-<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.112"></a>
-<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot;<a name="line.113"></a>
-<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.quotas.SpaceViolationPolicyEnforcement;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.regionserver.HRegion.RegionScannerImpl;<a name="line.115"></a>
-<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.regionserver.Leases.Lease;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.regionserver.Leases.LeaseStillHeldException;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.regionserver.Region.Operation;<a name="line.118"></a>
-<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.119"></a>
-<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.120"></a>
-<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.regionserver.handler.OpenMetaHandler;<a name="line.121"></a>
-<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.regionserver.handler.OpenPriorityRegionHandler;<a name="line.122"></a>
-<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.regionserver.handler.OpenRegionHandler;<a name="line.123"></a>
-<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.security.Superusers;<a name="line.124"></a>
-<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.security.User;<a name="line.125"></a>
-<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.security.access.AccessChecker;<a name="line.126"></a>
-<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.security.access.Permission;<a name="line.127"></a>
-<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.128"></a>
-<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.util.DNS;<a name="line.129"></a>
-<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.130"></a>
-<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.131"></a>
-<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.132"></a>
-<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.util.Strings;<a name="line.133"></a>
-<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.135"></a>
-<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.slf4j.Logger;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.slf4j.LoggerFactory;<a name="line.141"></a>
-<span class="sourceLineNo">142</span><a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hbase.thirdparty.com.google.common.cache.Cache;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hbase.thirdparty.com.google.common.cache.CacheBuilder;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hbase.thirdparty.com.google.protobuf.Message;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.152"></a>
+<span class="sourceLineNo">045</span>import org.apache.commons.lang3.mutable.MutableObject;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.conf.Configuration;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.fs.Path;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.ByteBufferExtendedCell;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.CacheEvictionStats;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.CacheEvictionStatsBuilder;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.Cell;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.CellScannable;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.HBaseIOException;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.HConstants;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.MultiActionResultTooLarge;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.Server;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.ServerName;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.TableName;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.client.Append;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.client.ConnectionUtils;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.client.Get;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.client.Put;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.client.Result;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.client.Row;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.client.VersionInfoUtil;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.conf.ConfigurationObserver;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.exceptions.ScannerResetException;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.90"></a>
+<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler;<a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.ipc.HBaseRpcController;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.ipc.PriorityFunction;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.ipc.QosPriority;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.ipc.RpcCallContext;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.ipc.RpcCallback;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.97"></a>
+<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface;<a name="line.98"></a>
+<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.ipc.RpcServerFactory;<a name="line.99"></a>
+<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.ipc.RpcServerInterface;<a name="line.100"></a>
+<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;<a name="line.101"></a>
+<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.ipc.ServerRpcController;<a name="line.102"></a>
+<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.log.HBaseMarkers;<a name="line.103"></a>
+<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.master.MasterRpcServices;<a name="line.104"></a>
+<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.net.Address;<a name="line.105"></a>
+<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.procedure2.RSProcedureCallable;<a name="line.106"></a>
+<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.quotas.ActivePolicyEnforcement;<a name="line.107"></a>
+<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.quotas.OperationQuota;<a name="line.108"></a>
+<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.quotas.QuotaUtil;<a name="line.109"></a>
+<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.quotas.RegionServerRpcQuotaManager;<a name="line.110"></a>
+<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.111"></a>
+<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot;<a name="line.112"></a>
+<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.quotas.SpaceViolationPolicyEnforcement;<a name="line.113"></a>
+<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.regionserver.HRegion.RegionScannerImpl;<a name="line.114"></a>
+<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.regionserver.Leases.Lease;<a name="line.115"></a>
+<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.regionserver.Leases.LeaseStillHeldException;<a name="line.116"></a>
+<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.regionserver.Region.Operation;<a name="line.117"></a>
+<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.118"></a>
+<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.119"></a>
+<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.regionserver.handler.OpenMetaHandler;<a name="line.120"></a>
+<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.regionserver.handler.OpenPriorityRegionHandler;<a name="line.121"></a>
+<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.regionserver.handler.OpenRegionHandler;<a name="line.122"></a>
+<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.security.Superusers;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.security.User;<a name="line.124"></a>
+<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.security.access.AccessChecker;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.security.access.Permission;<a name="line.126"></a>
+<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.127"></a>
+<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.util.DNS;<a name="line.128"></a>
+<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.129"></a>
+<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.130"></a>
+<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.131"></a>
+<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.util.Strings;<a name="line.132"></a>
+<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.133"></a>
+<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.134"></a>
+<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.135"></a>
+<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.136"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.slf4j.Logger;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.slf4j.LoggerFactory;<a name="line.140"></a>
+<span class="sourceLineNo">141</span><a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hbase.thirdparty.com.google.common.cache.Cache;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hbase.thirdparty.com.google.common.cache.CacheBuilder;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.hbase.thirdparty.com.google.protobuf.Message;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.152"></a>
 <span class="sourceLineNo">153</span><a name="line.153"></a>
 <span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.154"></a>
 <span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;<a name="line.155"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.ScannerListener.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.ScannerListener.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.ScannerListener.html
index 7d1dba6..11f9915 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.ScannerListener.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.ScannerListener.html
@@ -50,114 +50,114 @@
 <span class="sourceLineNo">042</span>import java.util.concurrent.atomic.AtomicBoolean;<a name="line.42"></a>
 <span class="sourceLineNo">043</span>import java.util.concurrent.atomic.AtomicLong;<a name="line.43"></a>
 <span class="sourceLineNo">044</span>import java.util.concurrent.atomic.LongAdder;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.commons.collections.CollectionUtils;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.commons.lang3.mutable.MutableObject;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.conf.Configuration;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.fs.Path;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.ByteBufferExtendedCell;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.CacheEvictionStats;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.CacheEvictionStatsBuilder;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.Cell;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.CellScannable;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.HBaseIOException;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.HConstants;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.MultiActionResultTooLarge;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.Server;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.ServerName;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.TableName;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.client.Append;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.client.ConnectionUtils;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.client.Get;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.client.Put;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.client.Result;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.client.Row;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.client.VersionInfoUtil;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.conf.ConfigurationObserver;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.exceptions.ScannerResetException;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler;<a name="line.92"></a>
-<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.ipc.HBaseRpcController;<a name="line.93"></a>
-<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.ipc.PriorityFunction;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.ipc.QosPriority;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.ipc.RpcCallContext;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.ipc.RpcCallback;<a name="line.97"></a>
-<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.98"></a>
-<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.ipc.RpcServerFactory;<a name="line.100"></a>
-<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.ipc.RpcServerInterface;<a name="line.101"></a>
-<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;<a name="line.102"></a>
-<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.ipc.ServerRpcController;<a name="line.103"></a>
-<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.log.HBaseMarkers;<a name="line.104"></a>
-<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.master.MasterRpcServices;<a name="line.105"></a>
-<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.net.Address;<a name="line.106"></a>
-<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.procedure2.RSProcedureCallable;<a name="line.107"></a>
-<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.quotas.ActivePolicyEnforcement;<a name="line.108"></a>
-<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.quotas.OperationQuota;<a name="line.109"></a>
-<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.quotas.QuotaUtil;<a name="line.110"></a>
-<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.quotas.RegionServerRpcQuotaManager;<a name="line.111"></a>
-<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.112"></a>
-<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot;<a name="line.113"></a>
-<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.quotas.SpaceViolationPolicyEnforcement;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.regionserver.HRegion.RegionScannerImpl;<a name="line.115"></a>
-<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.regionserver.Leases.Lease;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.regionserver.Leases.LeaseStillHeldException;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.regionserver.Region.Operation;<a name="line.118"></a>
-<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.119"></a>
-<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.120"></a>
-<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.regionserver.handler.OpenMetaHandler;<a name="line.121"></a>
-<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.regionserver.handler.OpenPriorityRegionHandler;<a name="line.122"></a>
-<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.regionserver.handler.OpenRegionHandler;<a name="line.123"></a>
-<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.security.Superusers;<a name="line.124"></a>
-<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.security.User;<a name="line.125"></a>
-<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.security.access.AccessChecker;<a name="line.126"></a>
-<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.security.access.Permission;<a name="line.127"></a>
-<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.128"></a>
-<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.util.DNS;<a name="line.129"></a>
-<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.130"></a>
-<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.131"></a>
-<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.132"></a>
-<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.util.Strings;<a name="line.133"></a>
-<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.135"></a>
-<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.slf4j.Logger;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.slf4j.LoggerFactory;<a name="line.141"></a>
-<span class="sourceLineNo">142</span><a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hbase.thirdparty.com.google.common.cache.Cache;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hbase.thirdparty.com.google.common.cache.CacheBuilder;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hbase.thirdparty.com.google.protobuf.Message;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.152"></a>
+<span class="sourceLineNo">045</span>import org.apache.commons.lang3.mutable.MutableObject;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.conf.Configuration;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.fs.Path;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.ByteBufferExtendedCell;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.CacheEvictionStats;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.CacheEvictionStatsBuilder;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.Cell;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.CellScannable;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.HBaseIOException;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.HConstants;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.MultiActionResultTooLarge;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.Server;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.ServerName;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.TableName;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.client.Append;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.client.ConnectionUtils;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.client.Get;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.client.Put;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.client.Result;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.client.Row;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.client.VersionInfoUtil;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.conf.ConfigurationObserver;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.exceptions.ScannerResetException;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.90"></a>
+<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler;<a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.ipc.HBaseRpcController;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.ipc.PriorityFunction;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.ipc.QosPriority;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.ipc.RpcCallContext;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.ipc.RpcCallback;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.97"></a>
+<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface;<a name="line.98"></a>
+<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.ipc.RpcServerFactory;<a name="line.99"></a>
+<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.ipc.RpcServerInterface;<a name="line.100"></a>
+<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;<a name="line.101"></a>
+<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.ipc.ServerRpcController;<a name="line.102"></a>
+<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.log.HBaseMarkers;<a name="line.103"></a>
+<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.master.MasterRpcServices;<a name="line.104"></a>
+<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.net.Address;<a name="line.105"></a>
+<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.procedure2.RSProcedureCallable;<a name="line.106"></a>
+<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.quotas.ActivePolicyEnforcement;<a name="line.107"></a>
+<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.quotas.OperationQuota;<a name="line.108"></a>
+<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.quotas.QuotaUtil;<a name="line.109"></a>
+<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.quotas.RegionServerRpcQuotaManager;<a name="line.110"></a>
+<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.111"></a>
+<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot;<a name="line.112"></a>
+<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.quotas.SpaceViolationPolicyEnforcement;<a name="line.113"></a>
+<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.regionserver.HRegion.RegionScannerImpl;<a name="line.114"></a>
+<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.regionserver.Leases.Lease;<a name="line.115"></a>
+<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.regionserver.Leases.LeaseStillHeldException;<a name="line.116"></a>
+<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.regionserver.Region.Operation;<a name="line.117"></a>
+<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.118"></a>
+<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.119"></a>
+<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.regionserver.handler.OpenMetaHandler;<a name="line.120"></a>
+<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.regionserver.handler.OpenPriorityRegionHandler;<a name="line.121"></a>
+<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.regionserver.handler.OpenRegionHandler;<a name="line.122"></a>
+<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.security.Superusers;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.security.User;<a name="line.124"></a>
+<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.security.access.AccessChecker;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.security.access.Permission;<a name="line.126"></a>
+<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.127"></a>
+<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.util.DNS;<a name="line.128"></a>
+<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.129"></a>
+<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.130"></a>
+<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.131"></a>
+<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.util.Strings;<a name="line.132"></a>
+<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.133"></a>
+<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.134"></a>
+<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.135"></a>
+<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.136"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.slf4j.Logger;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.slf4j.LoggerFactory;<a name="line.140"></a>
+<span class="sourceLineNo">141</span><a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hbase.thirdparty.com.google.common.cache.Cache;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hbase.thirdparty.com.google.common.cache.CacheBuilder;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.hbase.thirdparty.com.google.protobuf.Message;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.152"></a>
 <span class="sourceLineNo">153</span><a name="line.153"></a>
 <span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.154"></a>
 <span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;<a name="line.155"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.html
index 7d1dba6..11f9915 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.html
@@ -50,114 +50,114 @@
 <span class="sourceLineNo">042</span>import java.util.concurrent.atomic.AtomicBoolean;<a name="line.42"></a>
 <span class="sourceLineNo">043</span>import java.util.concurrent.atomic.AtomicLong;<a name="line.43"></a>
 <span class="sourceLineNo">044</span>import java.util.concurrent.atomic.LongAdder;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.commons.collections.CollectionUtils;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.commons.lang3.mutable.MutableObject;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.conf.Configuration;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.fs.Path;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.ByteBufferExtendedCell;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.CacheEvictionStats;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.CacheEvictionStatsBuilder;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.Cell;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.CellScannable;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.HBaseIOException;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.HConstants;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.MultiActionResultTooLarge;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.Server;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.ServerName;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.TableName;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.client.Append;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.client.ConnectionUtils;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.client.Get;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.client.Put;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.client.Result;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.client.Row;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.client.VersionInfoUtil;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.conf.ConfigurationObserver;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.exceptions.ScannerResetException;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler;<a name="line.92"></a>
-<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.ipc.HBaseRpcController;<a name="line.93"></a>
-<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.ipc.PriorityFunction;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.ipc.QosPriority;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.ipc.RpcCallContext;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.ipc.RpcCallback;<a name="line.97"></a>
-<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.98"></a>
-<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.ipc.RpcServerFactory;<a name="line.100"></a>
-<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.ipc.RpcServerInterface;<a name="line.101"></a>
-<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;<a name="line.102"></a>
-<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.ipc.ServerRpcController;<a name="line.103"></a>
-<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.log.HBaseMarkers;<a name="line.104"></a>
-<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.master.MasterRpcServices;<a name="line.105"></a>
-<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.net.Address;<a name="line.106"></a>
-<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.procedure2.RSProcedureCallable;<a name="line.107"></a>
-<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.quotas.ActivePolicyEnforcement;<a name="line.108"></a>
-<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.quotas.OperationQuota;<a name="line.109"></a>
-<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.quotas.QuotaUtil;<a name="line.110"></a>
-<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.quotas.RegionServerRpcQuotaManager;<a name="line.111"></a>
-<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.112"></a>
-<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot;<a name="line.113"></a>
-<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.quotas.SpaceViolationPolicyEnforcement;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.regionserver.HRegion.RegionScannerImpl;<a name="line.115"></a>
-<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.regionserver.Leases.Lease;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.regionserver.Leases.LeaseStillHeldException;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.regionserver.Region.Operation;<a name="line.118"></a>
-<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.119"></a>
-<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.120"></a>
-<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.regionserver.handler.OpenMetaHandler;<a name="line.121"></a>
-<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.regionserver.handler.OpenPriorityRegionHandler;<a name="line.122"></a>
-<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.regionserver.handler.OpenRegionHandler;<a name="line.123"></a>
-<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.security.Superusers;<a name="line.124"></a>
-<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.security.User;<a name="line.125"></a>
-<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.security.access.AccessChecker;<a name="line.126"></a>
-<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.security.access.Permission;<a name="line.127"></a>
-<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.128"></a>
-<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.util.DNS;<a name="line.129"></a>
-<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.130"></a>
-<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.131"></a>
-<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.132"></a>
-<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.util.Strings;<a name="line.133"></a>
-<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.135"></a>
-<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.slf4j.Logger;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.slf4j.LoggerFactory;<a name="line.141"></a>
-<span class="sourceLineNo">142</span><a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hbase.thirdparty.com.google.common.cache.Cache;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hbase.thirdparty.com.google.common.cache.CacheBuilder;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hbase.thirdparty.com.google.protobuf.Message;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.152"></a>
+<span class="sourceLineNo">045</span>import org.apache.commons.lang3.mutable.MutableObject;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.conf.Configuration;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.fs.Path;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.ByteBufferExtendedCell;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.CacheEvictionStats;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.CacheEvictionStatsBuilder;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.Cell;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.CellScannable;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.HBaseIOException;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.HConstants;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.MultiActionResultTooLarge;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.Server;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.ServerName;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.TableName;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.client.Append;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.client.ConnectionUtils;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.client.Get;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.client.Put;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.client.Result;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.client.Row;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.client.VersionInfoUtil;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.conf.ConfigurationObserver;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.exceptions.ScannerResetException;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.90"></a>
+<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler;<a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.ipc.HBaseRpcController;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.ipc.PriorityFunction;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.ipc.QosPriority;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.ipc.RpcCallContext;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.ipc.RpcCallback;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.97"></a>
+<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface;<a name="line.98"></a>
+<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.ipc.RpcServerFactory;<a name="line.99"></a>
+<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.ipc.RpcServerInterface;<a name="line.100"></a>
+<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;<a name="line.101"></a>
+<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.ipc.ServerRpcController;<a name="line.102"></a>
+<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.log.HBaseMarkers;<a name="line.103"></a>
+<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.master.MasterRpcServices;<a name="line.104"></a>
+<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.net.Address;<a name="line.105"></a>
+<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.procedure2.RSProcedureCallable;<a name="line.106"></a>
+<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.quotas.ActivePolicyEnforcement;<a name="line.107"></a>
+<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.quotas.OperationQuota;<a name="line.108"></a>
+<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.quotas.QuotaUtil;<a name="line.109"></a>
+<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.quotas.RegionServerRpcQuotaManager;<a name="line.110"></a>
+<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.111"></a>
+<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot;<a name="line.112"></a>
+<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.quotas.SpaceViolationPolicyEnforcement;<a name="line.113"></a>
+<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.regionserver.HRegion.RegionScannerImpl;<a name="line.114"></a>
+<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.regionserver.Leases.Lease;<a name="line.115"></a>
+<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.regionserver.Leases.LeaseStillHeldException;<a name="line.116"></a>
+<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.regionserver.Region.Operation;<a name="line.117"></a>
+<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.118"></a>
+<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.119"></a>
+<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.regionserver.handler.OpenMetaHandler;<a name="line.120"></a>
+<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.regionserver.handler.OpenPriorityRegionHandler;<a name="line.121"></a>
+<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.regionserver.handler.OpenRegionHandler;<a name="line.122"></a>
+<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.security.Superusers;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.security.User;<a name="line.124"></a>
+<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.security.access.AccessChecker;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.security.access.Permission;<a name="line.126"></a>
+<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.127"></a>
+<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.util.DNS;<a name="line.128"></a>
+<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.129"></a>
+<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.130"></a>
+<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.131"></a>
+<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.util.Strings;<a name="line.132"></a>
+<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.133"></a>
+<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.134"></a>
+<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.135"></a>
+<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.136"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.slf4j.Logger;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.slf4j.LoggerFactory;<a name="line.140"></a>
+<span class="sourceLineNo">141</span><a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hbase.thirdparty.com.google.common.cache.Cache;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hbase.thirdparty.com.google.common.cache.CacheBuilder;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.hbase.thirdparty.com.google.protobuf.Message;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.152"></a>
 <span class="sourceLineNo">153</span><a name="line.153"></a>
 <span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.154"></a>
 <span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;<a name="line.155"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StoreScanner.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StoreScanner.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StoreScanner.html
index ef16ae7..baef4a1 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StoreScanner.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/StoreScanner.html
@@ -35,33 +35,33 @@
 <span class="sourceLineNo">027</span>import java.util.concurrent.CountDownLatch;<a name="line.27"></a>
 <span class="sourceLineNo">028</span>import java.util.concurrent.locks.ReentrantLock;<a name="line.28"></a>
 <span class="sourceLineNo">029</span><a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.commons.collections.CollectionUtils;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.Cell;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.HConstants;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.client.IsolationLevel;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.executor.ExecutorService;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.filter.Filter;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.regionserver.handler.ParallelSeekHandler;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.regionserver.querymatcher.CompactionScanQueryMatcher;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.regionserver.querymatcher.UserScanQueryMatcher;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.50"></a>
-<span class="sourceLineNo">051</span><a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.slf4j.Logger;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.slf4j.LoggerFactory;<a name="line.53"></a>
-<span class="sourceLineNo">054</span><a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.56"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.Cell;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.HConstants;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.client.IsolationLevel;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.executor.ExecutorService;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.filter.Filter;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.regionserver.handler.ParallelSeekHandler;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.regionserver.querymatcher.CompactionScanQueryMatcher;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.regionserver.querymatcher.UserScanQueryMatcher;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.49"></a>
+<span class="sourceLineNo">050</span><a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.slf4j.Logger;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.slf4j.LoggerFactory;<a name="line.52"></a>
+<span class="sourceLineNo">053</span><a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.56"></a>
 <span class="sourceLineNo">057</span><a name="line.57"></a>
 <span class="sourceLineNo">058</span>/**<a name="line.58"></a>
 <span class="sourceLineNo">059</span> * Scanner scans both the memstore and the Store. Coalesce KeyValue stream into List&amp;lt;KeyValue&amp;gt;<a name="line.59"></a>


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html b/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html
index e080cd6..73a1036 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html
@@ -42,604 +42,605 @@
 <span class="sourceLineNo">034</span>import java.util.function.Predicate;<a name="line.34"></a>
 <span class="sourceLineNo">035</span>import java.util.stream.Collectors;<a name="line.35"></a>
 <span class="sourceLineNo">036</span><a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.commons.lang.builder.HashCodeBuilder;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.commons.logging.Log;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.commons.logging.LogFactory;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.hadoop.conf.Configuration;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.fs.FileStatus;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.fs.FileSystem;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.fs.Path;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.TableName;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.client.Connection;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.client.Get;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.client.Put;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.client.Result;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.client.Table;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.util.HFileArchiveUtil;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.util.StringUtils;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.55"></a>
-<span class="sourceLineNo">056</span><a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hbase.thirdparty.com.google.common.collect.HashMultimap;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;<a name="line.59"></a>
-<span class="sourceLineNo">060</span><a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile;<a name="line.65"></a>
-<span class="sourceLineNo">066</span><a name="line.66"></a>
-<span class="sourceLineNo">067</span>/**<a name="line.67"></a>
-<span class="sourceLineNo">068</span> * Tracks file archiving and updates the hbase quota table.<a name="line.68"></a>
-<span class="sourceLineNo">069</span> */<a name="line.69"></a>
-<span class="sourceLineNo">070</span>@InterfaceAudience.Private<a name="line.70"></a>
-<span class="sourceLineNo">071</span>public class FileArchiverNotifierImpl implements FileArchiverNotifier {<a name="line.71"></a>
-<span class="sourceLineNo">072</span>  private static final Log LOG = LogFactory.getLog(FileArchiverNotifierImpl.class);<a name="line.72"></a>
-<span class="sourceLineNo">073</span>  private final Connection conn;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>  private final Configuration conf;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>  private final FileSystem fs;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>  private final TableName tn;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>  private final ReadLock readLock;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>  private final WriteLock writeLock;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>  private volatile long lastFullCompute = Long.MIN_VALUE;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>  private List&lt;String&gt; currentSnapshots = Collections.emptyList();<a name="line.80"></a>
-<span class="sourceLineNo">081</span>  private static final Map&lt;String,Object&gt; NAMESPACE_LOCKS = new HashMap&lt;&gt;();<a name="line.81"></a>
-<span class="sourceLineNo">082</span><a name="line.82"></a>
-<span class="sourceLineNo">083</span>  /**<a name="line.83"></a>
-<span class="sourceLineNo">084</span>   * An Exception thrown when SnapshotSize updates to hbase:quota fail to be written.<a name="line.84"></a>
-<span class="sourceLineNo">085</span>   */<a name="line.85"></a>
-<span class="sourceLineNo">086</span>  @InterfaceAudience.Private<a name="line.86"></a>
-<span class="sourceLineNo">087</span>  public static class QuotaSnapshotSizeSerializationException extends IOException {<a name="line.87"></a>
-<span class="sourceLineNo">088</span>    private static final long serialVersionUID = 1L;<a name="line.88"></a>
-<span class="sourceLineNo">089</span><a name="line.89"></a>
-<span class="sourceLineNo">090</span>    public QuotaSnapshotSizeSerializationException(String msg) {<a name="line.90"></a>
-<span class="sourceLineNo">091</span>      super(msg);<a name="line.91"></a>
-<span class="sourceLineNo">092</span>    }<a name="line.92"></a>
-<span class="sourceLineNo">093</span>  }<a name="line.93"></a>
-<span class="sourceLineNo">094</span><a name="line.94"></a>
-<span class="sourceLineNo">095</span>  public FileArchiverNotifierImpl(<a name="line.95"></a>
-<span class="sourceLineNo">096</span>      Connection conn, Configuration conf, FileSystem fs, TableName tn) {<a name="line.96"></a>
-<span class="sourceLineNo">097</span>    this.conn = conn;<a name="line.97"></a>
-<span class="sourceLineNo">098</span>    this.conf = conf;<a name="line.98"></a>
-<span class="sourceLineNo">099</span>    this.fs = fs;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>    this.tn = tn;<a name="line.100"></a>
-<span class="sourceLineNo">101</span>    ReentrantReadWriteLock lock = new ReentrantReadWriteLock();<a name="line.101"></a>
-<span class="sourceLineNo">102</span>    readLock = lock.readLock();<a name="line.102"></a>
-<span class="sourceLineNo">103</span>    writeLock = lock.writeLock();<a name="line.103"></a>
-<span class="sourceLineNo">104</span>  }<a name="line.104"></a>
-<span class="sourceLineNo">105</span><a name="line.105"></a>
-<span class="sourceLineNo">106</span>  static synchronized Object getLockForNamespace(String namespace) {<a name="line.106"></a>
-<span class="sourceLineNo">107</span>    return NAMESPACE_LOCKS.computeIfAbsent(namespace, (ns) -&gt; new Object());<a name="line.107"></a>
-<span class="sourceLineNo">108</span>  }<a name="line.108"></a>
-<span class="sourceLineNo">109</span><a name="line.109"></a>
-<span class="sourceLineNo">110</span>  /**<a name="line.110"></a>
-<span class="sourceLineNo">111</span>   * Returns a strictly-increasing measure of time extracted by {@link System#nanoTime()}.<a name="line.111"></a>
-<span class="sourceLineNo">112</span>   */<a name="line.112"></a>
-<span class="sourceLineNo">113</span>  long getLastFullCompute() {<a name="line.113"></a>
-<span class="sourceLineNo">114</span>    return lastFullCompute;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>  }<a name="line.115"></a>
-<span class="sourceLineNo">116</span><a name="line.116"></a>
-<span class="sourceLineNo">117</span>  @Override<a name="line.117"></a>
-<span class="sourceLineNo">118</span>  public void addArchivedFiles(Set&lt;Entry&lt;String, Long&gt;&gt; fileSizes) throws IOException {<a name="line.118"></a>
-<span class="sourceLineNo">119</span>    long start = System.nanoTime();<a name="line.119"></a>
-<span class="sourceLineNo">120</span>    readLock.lock();<a name="line.120"></a>
-<span class="sourceLineNo">121</span>    try {<a name="line.121"></a>
-<span class="sourceLineNo">122</span>      // We want to catch the case where we got an archival request, but there was a full<a name="line.122"></a>
-<span class="sourceLineNo">123</span>      // re-computation in progress that was blocking us. Most likely, the full computation is going<a name="line.123"></a>
-<span class="sourceLineNo">124</span>      // to already include the changes we were going to make.<a name="line.124"></a>
-<span class="sourceLineNo">125</span>      //<a name="line.125"></a>
-<span class="sourceLineNo">126</span>      // Same as "start &lt; lastFullCompute" but avoiding numeric overflow per the<a name="line.126"></a>
-<span class="sourceLineNo">127</span>      // System.nanoTime() javadoc<a name="line.127"></a>
-<span class="sourceLineNo">128</span>      if (lastFullCompute != Long.MIN_VALUE &amp;&amp; start - lastFullCompute &lt; 0) {<a name="line.128"></a>
-<span class="sourceLineNo">129</span>        if (LOG.isTraceEnabled()) {<a name="line.129"></a>
-<span class="sourceLineNo">130</span>          LOG.trace("A full computation was performed after this request was received."<a name="line.130"></a>
-<span class="sourceLineNo">131</span>              + " Ignoring requested updates: " + fileSizes);<a name="line.131"></a>
-<span class="sourceLineNo">132</span>        }<a name="line.132"></a>
-<span class="sourceLineNo">133</span>        return;<a name="line.133"></a>
-<span class="sourceLineNo">134</span>      }<a name="line.134"></a>
-<span class="sourceLineNo">135</span><a name="line.135"></a>
-<span class="sourceLineNo">136</span>      if (LOG.isTraceEnabled()) {<a name="line.136"></a>
-<span class="sourceLineNo">137</span>        LOG.trace("currentSnapshots: " + currentSnapshots + " fileSize: "+ fileSizes);<a name="line.137"></a>
-<span class="sourceLineNo">138</span>      }<a name="line.138"></a>
-<span class="sourceLineNo">139</span><a name="line.139"></a>
-<span class="sourceLineNo">140</span>      // Write increment to quota table for the correct snapshot. Only do this if we have snapshots<a name="line.140"></a>
-<span class="sourceLineNo">141</span>      // and some files that were archived.<a name="line.141"></a>
-<span class="sourceLineNo">142</span>      if (!currentSnapshots.isEmpty() &amp;&amp; !fileSizes.isEmpty()) {<a name="line.142"></a>
-<span class="sourceLineNo">143</span>        // We get back the files which no snapshot referenced (the files which will be deleted soon)<a name="line.143"></a>
-<span class="sourceLineNo">144</span>        groupArchivedFiledBySnapshotAndRecordSize(currentSnapshots, fileSizes);<a name="line.144"></a>
-<span class="sourceLineNo">145</span>      }<a name="line.145"></a>
-<span class="sourceLineNo">146</span>    } finally {<a name="line.146"></a>
-<span class="sourceLineNo">147</span>      readLock.unlock();<a name="line.147"></a>
-<span class="sourceLineNo">148</span>    }<a name="line.148"></a>
-<span class="sourceLineNo">149</span>  }<a name="line.149"></a>
-<span class="sourceLineNo">150</span><a name="line.150"></a>
-<span class="sourceLineNo">151</span>  /**<a name="line.151"></a>
-<span class="sourceLineNo">152</span>   * For each file in the map, this updates the first snapshot (lexicographic snapshot name) that<a name="line.152"></a>
-<span class="sourceLineNo">153</span>   * references this file. The result of this computation is serialized to the quota table.<a name="line.153"></a>
-<span class="sourceLineNo">154</span>   *<a name="line.154"></a>
-<span class="sourceLineNo">155</span>   * @param snapshots A collection of HBase snapshots to group the files into<a name="line.155"></a>
-<span class="sourceLineNo">156</span>   * @param fileSizes A map of file names to their sizes<a name="line.156"></a>
-<span class="sourceLineNo">157</span>   */<a name="line.157"></a>
-<span class="sourceLineNo">158</span>  void groupArchivedFiledBySnapshotAndRecordSize(<a name="line.158"></a>
-<span class="sourceLineNo">159</span>      List&lt;String&gt; snapshots, Set&lt;Entry&lt;String, Long&gt;&gt; fileSizes) throws IOException {<a name="line.159"></a>
-<span class="sourceLineNo">160</span>    // Make a copy as we'll modify it.<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    final Map&lt;String,Long&gt; filesToUpdate = new HashMap&lt;&gt;(fileSizes.size());<a name="line.161"></a>
-<span class="sourceLineNo">162</span>    for (Entry&lt;String,Long&gt; entry : fileSizes) {<a name="line.162"></a>
-<span class="sourceLineNo">163</span>      filesToUpdate.put(entry.getKey(), entry.getValue());<a name="line.163"></a>
-<span class="sourceLineNo">164</span>    }<a name="line.164"></a>
-<span class="sourceLineNo">165</span>    // Track the change in size to each snapshot<a name="line.165"></a>
-<span class="sourceLineNo">166</span>    final Map&lt;String,Long&gt; snapshotSizeChanges = new HashMap&lt;&gt;();<a name="line.166"></a>
-<span class="sourceLineNo">167</span>    for (String snapshot : snapshots) {<a name="line.167"></a>
-<span class="sourceLineNo">168</span>      // For each file in `filesToUpdate`, check if `snapshot` refers to it.<a name="line.168"></a>
-<span class="sourceLineNo">169</span>      // If `snapshot` does, remove it from `filesToUpdate` and add it to `snapshotSizeChanges`.<a name="line.169"></a>
-<span class="sourceLineNo">170</span>      bucketFilesToSnapshot(snapshot, filesToUpdate, snapshotSizeChanges);<a name="line.170"></a>
-<span class="sourceLineNo">171</span>      if (filesToUpdate.isEmpty()) {<a name="line.171"></a>
-<span class="sourceLineNo">172</span>        // If we have no more files recently archived, we have nothing more to check<a name="line.172"></a>
-<span class="sourceLineNo">173</span>        break;<a name="line.173"></a>
-<span class="sourceLineNo">174</span>      }<a name="line.174"></a>
-<span class="sourceLineNo">175</span>    }<a name="line.175"></a>
-<span class="sourceLineNo">176</span>    // We have computed changes to the snapshot size, we need to record them.<a name="line.176"></a>
-<span class="sourceLineNo">177</span>    if (!snapshotSizeChanges.isEmpty()) {<a name="line.177"></a>
-<span class="sourceLineNo">178</span>      if (LOG.isTraceEnabled()) {<a name="line.178"></a>
-<span class="sourceLineNo">179</span>        LOG.trace("Writing snapshot size changes for: " + snapshotSizeChanges);<a name="line.179"></a>
-<span class="sourceLineNo">180</span>      }<a name="line.180"></a>
-<span class="sourceLineNo">181</span>      persistSnapshotSizeChanges(snapshotSizeChanges);<a name="line.181"></a>
-<span class="sourceLineNo">182</span>    }<a name="line.182"></a>
-<span class="sourceLineNo">183</span>  }<a name="line.183"></a>
-<span class="sourceLineNo">184</span><a name="line.184"></a>
-<span class="sourceLineNo">185</span>  /**<a name="line.185"></a>
-<span class="sourceLineNo">186</span>   * For the given snapshot, find all files which this {@code snapshotName} references. After a file<a name="line.186"></a>
-<span class="sourceLineNo">187</span>   * is found to be referenced by the snapshot, it is removed from {@code filesToUpdate} and<a name="line.187"></a>
-<span class="sourceLineNo">188</span>   * {@code snapshotSizeChanges} is updated in concert.<a name="line.188"></a>
-<span class="sourceLineNo">189</span>   *<a name="line.189"></a>
-<span class="sourceLineNo">190</span>   * @param snapshotName The snapshot to check<a name="line.190"></a>
-<span class="sourceLineNo">191</span>   * @param filesToUpdate A mapping of archived files to their size<a name="line.191"></a>
-<span class="sourceLineNo">192</span>   * @param snapshotSizeChanges A mapping of snapshots and their change in size<a name="line.192"></a>
-<span class="sourceLineNo">193</span>   */<a name="line.193"></a>
-<span class="sourceLineNo">194</span>  void bucketFilesToSnapshot(<a name="line.194"></a>
-<span class="sourceLineNo">195</span>      String snapshotName, Map&lt;String,Long&gt; filesToUpdate, Map&lt;String,Long&gt; snapshotSizeChanges)<a name="line.195"></a>
-<span class="sourceLineNo">196</span>          throws IOException {<a name="line.196"></a>
-<span class="sourceLineNo">197</span>    // A quick check to avoid doing work if the caller unnecessarily invoked this method.<a name="line.197"></a>
-<span class="sourceLineNo">198</span>    if (filesToUpdate.isEmpty()) {<a name="line.198"></a>
-<span class="sourceLineNo">199</span>      return;<a name="line.199"></a>
-<span class="sourceLineNo">200</span>    }<a name="line.200"></a>
-<span class="sourceLineNo">201</span><a name="line.201"></a>
-<span class="sourceLineNo">202</span>    Path snapshotDir = SnapshotDescriptionUtils.getCompletedSnapshotDir(<a name="line.202"></a>
-<span class="sourceLineNo">203</span>        snapshotName, FSUtils.getRootDir(conf));<a name="line.203"></a>
-<span class="sourceLineNo">204</span>    SnapshotDescription sd = SnapshotDescriptionUtils.readSnapshotInfo(fs, snapshotDir);<a name="line.204"></a>
-<span class="sourceLineNo">205</span>    SnapshotManifest manifest = SnapshotManifest.open(conf, fs, snapshotDir, sd);<a name="line.205"></a>
-<span class="sourceLineNo">206</span>    // For each region referenced by the snapshot<a name="line.206"></a>
-<span class="sourceLineNo">207</span>    for (SnapshotRegionManifest rm : manifest.getRegionManifests()) {<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      // For each column family in this region<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      for (FamilyFiles ff : rm.getFamilyFilesList()) {<a name="line.209"></a>
-<span class="sourceLineNo">210</span>        // And each store file in that family<a name="line.210"></a>
-<span class="sourceLineNo">211</span>        for (StoreFile sf : ff.getStoreFilesList()) {<a name="line.211"></a>
-<span class="sourceLineNo">212</span>          Long valueOrNull = filesToUpdate.remove(sf.getName());<a name="line.212"></a>
-<span class="sourceLineNo">213</span>          if (valueOrNull != null) {<a name="line.213"></a>
-<span class="sourceLineNo">214</span>            // This storefile was recently archived, we should update this snapshot with its size<a name="line.214"></a>
-<span class="sourceLineNo">215</span>            snapshotSizeChanges.merge(snapshotName, valueOrNull, Long::sum);<a name="line.215"></a>
-<span class="sourceLineNo">216</span>          }<a name="line.216"></a>
-<span class="sourceLineNo">217</span>          // Short-circuit, if we have no more files that were archived, we don't need to iterate<a name="line.217"></a>
-<span class="sourceLineNo">218</span>          // over the rest of the snapshot.<a name="line.218"></a>
-<span class="sourceLineNo">219</span>          if (filesToUpdate.isEmpty()) {<a name="line.219"></a>
-<span class="sourceLineNo">220</span>            return;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>          }<a name="line.221"></a>
-<span class="sourceLineNo">222</span>        }<a name="line.222"></a>
-<span class="sourceLineNo">223</span>      }<a name="line.223"></a>
-<span class="sourceLineNo">224</span>    }<a name="line.224"></a>
-<span class="sourceLineNo">225</span>  }<a name="line.225"></a>
-<span class="sourceLineNo">226</span><a name="line.226"></a>
-<span class="sourceLineNo">227</span>  /**<a name="line.227"></a>
-<span class="sourceLineNo">228</span>   * Reads the current size for each snapshot to update, generates a new update based on that value,<a name="line.228"></a>
-<span class="sourceLineNo">229</span>   * and then writes the new update.<a name="line.229"></a>
-<span class="sourceLineNo">230</span>   *<a name="line.230"></a>
-<span class="sourceLineNo">231</span>   * @param snapshotSizeChanges A map of snapshot name to size change<a name="line.231"></a>
-<span class="sourceLineNo">232</span>   */<a name="line.232"></a>
-<span class="sourceLineNo">233</span>  void persistSnapshotSizeChanges(Map&lt;String,Long&gt; snapshotSizeChanges) throws IOException {<a name="line.233"></a>
-<span class="sourceLineNo">234</span>    try (Table quotaTable = conn.getTable(QuotaTableUtil.QUOTA_TABLE_NAME)) {<a name="line.234"></a>
-<span class="sourceLineNo">235</span>      // Create a list (with a more typical ordering implied)<a name="line.235"></a>
-<span class="sourceLineNo">236</span>      final List&lt;Entry&lt;String,Long&gt;&gt; snapshotSizeEntries = new ArrayList&lt;&gt;(<a name="line.236"></a>
-<span class="sourceLineNo">237</span>          snapshotSizeChanges.entrySet());<a name="line.237"></a>
-<span class="sourceLineNo">238</span>      // Create the Gets for each snapshot we need to update<a name="line.238"></a>
-<span class="sourceLineNo">239</span>      final List&lt;Get&gt; snapshotSizeGets = snapshotSizeEntries.stream()<a name="line.239"></a>
-<span class="sourceLineNo">240</span>          .map((e) -&gt; QuotaTableUtil.makeGetForSnapshotSize(tn, e.getKey()))<a name="line.240"></a>
-<span class="sourceLineNo">241</span>          .collect(Collectors.toList());<a name="line.241"></a>
-<span class="sourceLineNo">242</span>      final Iterator&lt;Entry&lt;String,Long&gt;&gt; iterator = snapshotSizeEntries.iterator();<a name="line.242"></a>
-<span class="sourceLineNo">243</span>      // A List to store each Put we'll create from the Get's we retrieve<a name="line.243"></a>
-<span class="sourceLineNo">244</span>      final List&lt;Put&gt; updates = new ArrayList&lt;&gt;(snapshotSizeEntries.size());<a name="line.244"></a>
-<span class="sourceLineNo">245</span><a name="line.245"></a>
-<span class="sourceLineNo">246</span>      // TODO Push this down to the RegionServer with a coprocessor:<a name="line.246"></a>
-<span class="sourceLineNo">247</span>      //<a name="line.247"></a>
-<span class="sourceLineNo">248</span>      // We would really like to piggy-back on the row-lock already being grabbed<a name="line.248"></a>
-<span class="sourceLineNo">249</span>      // to handle the update of the row in the quota table. However, because the value<a name="line.249"></a>
-<span class="sourceLineNo">250</span>      // is a serialized protobuf, the standard Increment API doesn't work for us. With a CP, we<a name="line.250"></a>
-<span class="sourceLineNo">251</span>      // can just send the size deltas to the RS and atomically update the serialized PB object<a name="line.251"></a>
-<span class="sourceLineNo">252</span>      // while relying on the row-lock for synchronization.<a name="line.252"></a>
-<span class="sourceLineNo">253</span>      //<a name="line.253"></a>
-<span class="sourceLineNo">254</span>      // Synchronizing on the namespace string is a "minor smell" but passable as this is<a name="line.254"></a>
-<span class="sourceLineNo">255</span>      // only invoked via a single caller (the active Master). Using the namespace name lets us<a name="line.255"></a>
-<span class="sourceLineNo">256</span>      // have some parallelism without worry of on caller seeing stale data from the quota table.<a name="line.256"></a>
-<span class="sourceLineNo">257</span>      synchronized (getLockForNamespace(tn.getNamespaceAsString())) {<a name="line.257"></a>
-<span class="sourceLineNo">258</span>        final Result[] existingSnapshotSizes = quotaTable.get(snapshotSizeGets);<a name="line.258"></a>
-<span class="sourceLineNo">259</span>        long totalSizeChange = 0;<a name="line.259"></a>
-<span class="sourceLineNo">260</span>        // Read the current size values (if they exist) to generate the new value<a name="line.260"></a>
-<span class="sourceLineNo">261</span>        for (Result result : existingSnapshotSizes) {<a name="line.261"></a>
-<span class="sourceLineNo">262</span>          Entry&lt;String,Long&gt; entry = iterator.next();<a name="line.262"></a>
-<span class="sourceLineNo">263</span>          String snapshot = entry.getKey();<a name="line.263"></a>
-<span class="sourceLineNo">264</span>          Long size = entry.getValue();<a name="line.264"></a>
-<span class="sourceLineNo">265</span>          // Track the total size change for the namespace this table belongs in<a name="line.265"></a>
-<span class="sourceLineNo">266</span>          totalSizeChange += size;<a name="line.266"></a>
-<span class="sourceLineNo">267</span>          // Get the size of the previous value (or zero)<a name="line.267"></a>
-<span class="sourceLineNo">268</span>          long previousSize = getSnapshotSizeFromResult(result);<a name="line.268"></a>
-<span class="sourceLineNo">269</span>          // Create an update. A file was archived from the table, so the table's size goes<a name="line.269"></a>
-<span class="sourceLineNo">270</span>          // down, but the snapshot's size goes up.<a name="line.270"></a>
-<span class="sourceLineNo">271</span>          updates.add(QuotaTableUtil.createPutForSnapshotSize(tn, snapshot, previousSize + size));<a name="line.271"></a>
-<span class="sourceLineNo">272</span>        }<a name="line.272"></a>
-<span class="sourceLineNo">273</span><a name="line.273"></a>
-<span class="sourceLineNo">274</span>        // Create an update for the summation of all snapshots in the namespace<a name="line.274"></a>
-<span class="sourceLineNo">275</span>        if (totalSizeChange != 0) {<a name="line.275"></a>
-<span class="sourceLineNo">276</span>          long previousSize = getPreviousNamespaceSnapshotSize(<a name="line.276"></a>
-<span class="sourceLineNo">277</span>              quotaTable, tn.getNamespaceAsString());<a name="line.277"></a>
-<span class="sourceLineNo">278</span>          updates.add(QuotaTableUtil.createPutForNamespaceSnapshotSize(<a name="line.278"></a>
-<span class="sourceLineNo">279</span>              tn.getNamespaceAsString(), previousSize + totalSizeChange));<a name="line.279"></a>
-<span class="sourceLineNo">280</span>        }<a name="line.280"></a>
-<span class="sourceLineNo">281</span><a name="line.281"></a>
-<span class="sourceLineNo">282</span>        // Send all of the quota table updates in one batch.<a name="line.282"></a>
-<span class="sourceLineNo">283</span>        List&lt;Object&gt; failures = new ArrayList&lt;&gt;();<a name="line.283"></a>
-<span class="sourceLineNo">284</span>        final Object[] results = new Object[updates.size()];<a name="line.284"></a>
-<span class="sourceLineNo">285</span>        quotaTable.batch(updates, results);<a name="line.285"></a>
-<span class="sourceLineNo">286</span>        for (Object result : results) {<a name="line.286"></a>
-<span class="sourceLineNo">287</span>          // A null result is an error condition (all RPC attempts failed)<a name="line.287"></a>
-<span class="sourceLineNo">288</span>          if (!(result instanceof Result)) {<a name="line.288"></a>
-<span class="sourceLineNo">289</span>            failures.add(result);<a name="line.289"></a>
-<span class="sourceLineNo">290</span>          }<a name="line.290"></a>
-<span class="sourceLineNo">291</span>        }<a name="line.291"></a>
-<span class="sourceLineNo">292</span>        // Propagate a failure if any updates failed<a name="line.292"></a>
-<span class="sourceLineNo">293</span>        if (!failures.isEmpty()) {<a name="line.293"></a>
-<span class="sourceLineNo">294</span>          throw new QuotaSnapshotSizeSerializationException(<a name="line.294"></a>
-<span class="sourceLineNo">295</span>              "Failed to write some snapshot size updates: " + failures);<a name="line.295"></a>
-<span class="sourceLineNo">296</span>        }<a name="line.296"></a>
-<span class="sourceLineNo">297</span>      }<a name="line.297"></a>
-<span class="sourceLineNo">298</span>    } catch (InterruptedException e) {<a name="line.298"></a>
-<span class="sourceLineNo">299</span>      Thread.currentThread().interrupt();<a name="line.299"></a>
-<span class="sourceLineNo">300</span>      return;<a name="line.300"></a>
-<span class="sourceLineNo">301</span>    }<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  }<a name="line.302"></a>
-<span class="sourceLineNo">303</span><a name="line.303"></a>
-<span class="sourceLineNo">304</span>  /**<a name="line.304"></a>
-<span class="sourceLineNo">305</span>   * Fetches the current size of all snapshots in the given {@code namespace}.<a name="line.305"></a>
-<span class="sourceLineNo">306</span>   *<a name="line.306"></a>
-<span class="sourceLineNo">307</span>   * @param quotaTable The HBase quota table<a name="line.307"></a>
-<span class="sourceLineNo">308</span>   * @param namespace Namespace to fetch the sum of snapshot sizes for<a name="line.308"></a>
-<span class="sourceLineNo">309</span>   * @return The size of all snapshot sizes for the namespace in bytes.<a name="line.309"></a>
-<span class="sourceLineNo">310</span>   */<a name="line.310"></a>
-<span class="sourceLineNo">311</span>  long getPreviousNamespaceSnapshotSize(Table quotaTable, String namespace) throws IOException {<a name="line.311"></a>
-<span class="sourceLineNo">312</span>    // Update the size of each snapshot for all snapshots in a namespace.<a name="line.312"></a>
-<span class="sourceLineNo">313</span>    Result r = quotaTable.get(<a name="line.313"></a>
-<span class="sourceLineNo">314</span>        QuotaTableUtil.createGetNamespaceSnapshotSize(namespace));<a name="line.314"></a>
-<span class="sourceLineNo">315</span>    return getSnapshotSizeFromResult(r);<a name="line.315"></a>
-<span class="sourceLineNo">316</span>  }<a name="line.316"></a>
-<span class="sourceLineNo">317</span><a name="line.317"></a>
-<span class="sourceLineNo">318</span>  /**<a name="line.318"></a>
-<span class="sourceLineNo">319</span>   * Extracts the size component from a serialized {@link SpaceQuotaSnapshot} protobuf.<a name="line.319"></a>
-<span class="sourceLineNo">320</span>   *<a name="line.320"></a>
-<span class="sourceLineNo">321</span>   * @param r A Result containing one cell with a SpaceQuotaSnapshot protobuf<a name="line.321"></a>
-<span class="sourceLineNo">322</span>   * @return The size in bytes of the snapshot.<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   */<a name="line.323"></a>
-<span class="sourceLineNo">324</span>  long getSnapshotSizeFromResult(Result r) throws InvalidProtocolBufferException {<a name="line.324"></a>
-<span class="sourceLineNo">325</span>    // Per javadoc, Result should only be null if an exception was thrown. So, if we're here,<a name="line.325"></a>
-<span class="sourceLineNo">326</span>    // we should be non-null. If we can't advance to the first cell, same as "no cell".<a name="line.326"></a>
-<span class="sourceLineNo">327</span>    if (!r.isEmpty() &amp;&amp; r.advance()) {<a name="line.327"></a>
-<span class="sourceLineNo">328</span>      return QuotaTableUtil.parseSnapshotSize(r.current());<a name="line.328"></a>
-<span class="sourceLineNo">329</span>    }<a name="line.329"></a>
-<span class="sourceLineNo">330</span>    return 0L;<a name="line.330"></a>
-<span class="sourceLineNo">331</span>  }<a name="line.331"></a>
-<span class="sourceLineNo">332</span><a name="line.332"></a>
-<span class="sourceLineNo">333</span>  @Override<a name="line.333"></a>
-<span class="sourceLineNo">334</span>  public long computeAndStoreSnapshotSizes(<a name="line.334"></a>
-<span class="sourceLineNo">335</span>      Collection&lt;String&gt; currentSnapshots) throws IOException {<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    // Record what the current snapshots are<a name="line.336"></a>
-<span class="sourceLineNo">337</span>    this.currentSnapshots = new ArrayList&lt;&gt;(currentSnapshots);<a name="line.337"></a>
-<span class="sourceLineNo">338</span>    Collections.sort(this.currentSnapshots);<a name="line.338"></a>
-<span class="sourceLineNo">339</span><a name="line.339"></a>
-<span class="sourceLineNo">340</span>    // compute new size for table + snapshots for that table<a name="line.340"></a>
-<span class="sourceLineNo">341</span>    List&lt;SnapshotWithSize&gt; snapshotSizes = computeSnapshotSizes(this.currentSnapshots);<a name="line.341"></a>
-<span class="sourceLineNo">342</span>    if (LOG.isTraceEnabled()) {<a name="line.342"></a>
-<span class="sourceLineNo">343</span>      LOG.trace("Computed snapshot sizes for " + tn + " of " + snapshotSizes);<a name="line.343"></a>
-<span class="sourceLineNo">344</span>    }<a name="line.344"></a>
-<span class="sourceLineNo">345</span><a name="line.345"></a>
-<span class="sourceLineNo">346</span>    // Compute the total size of all snapshots against our table<a name="line.346"></a>
-<span class="sourceLineNo">347</span>    final long totalSnapshotSize = snapshotSizes.stream().mapToLong((sws) -&gt; sws.getSize()).sum();<a name="line.347"></a>
-<span class="sourceLineNo">348</span><a name="line.348"></a>
-<span class="sourceLineNo">349</span>    writeLock.lock();<a name="line.349"></a>
-<span class="sourceLineNo">350</span>    try {<a name="line.350"></a>
-<span class="sourceLineNo">351</span>      // Persist the size of each snapshot<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      try (Table quotaTable = conn.getTable(QuotaTableUtil.QUOTA_TABLE_NAME)) {<a name="line.352"></a>
-<span class="sourceLineNo">353</span>        persistSnapshotSizes(quotaTable, snapshotSizes);<a name="line.353"></a>
-<span class="sourceLineNo">354</span>      }<a name="line.354"></a>
-<span class="sourceLineNo">355</span><a name="line.355"></a>
-<span class="sourceLineNo">356</span>      // Report the last time we did a recomputation<a name="line.356"></a>
-<span class="sourceLineNo">357</span>      lastFullCompute = System.nanoTime();<a name="line.357"></a>
-<span class="sourceLineNo">358</span><a name="line.358"></a>
-<span class="sourceLineNo">359</span>      return totalSnapshotSize;<a name="line.359"></a>
-<span class="sourceLineNo">360</span>    } finally {<a name="line.360"></a>
-<span class="sourceLineNo">361</span>      writeLock.unlock();<a name="line.361"></a>
-<span class="sourceLineNo">362</span>    }<a name="line.362"></a>
-<span class="sourceLineNo">363</span>  }<a name="line.363"></a>
-<span class="sourceLineNo">364</span><a name="line.364"></a>
-<span class="sourceLineNo">365</span>  @Override<a name="line.365"></a>
-<span class="sourceLineNo">366</span>  public String toString() {<a name="line.366"></a>
-<span class="sourceLineNo">367</span>    StringBuilder sb = new StringBuilder();<a name="line.367"></a>
-<span class="sourceLineNo">368</span>    sb.append(getClass().getSimpleName()).append("[");<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    sb.append("tableName=").append(tn).append(", currentSnapshots=");<a name="line.369"></a>
-<span class="sourceLineNo">370</span>    sb.append(currentSnapshots).append(", lastFullCompute=").append(lastFullCompute);<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    return sb.append("]").toString();<a name="line.371"></a>
-<span class="sourceLineNo">372</span>  }<a name="line.372"></a>
-<span class="sourceLineNo">373</span><a name="line.373"></a>
-<span class="sourceLineNo">374</span>  /**<a name="line.374"></a>
-<span class="sourceLineNo">375</span>   * Computes the size of each snapshot against the table referenced by {@code this}.<a name="line.375"></a>
-<span class="sourceLineNo">376</span>   *<a name="line.376"></a>
-<span class="sourceLineNo">377</span>   * @param snapshots A sorted list of snapshots against {@code tn}.<a name="line.377"></a>
-<span class="sourceLineNo">378</span>   * @return A list of the size for each snapshot against {@code tn}.<a name="line.378"></a>
-<span class="sourceLineNo">379</span>   */<a name="line.379"></a>
-<span class="sourceLineNo">380</span>  List&lt;SnapshotWithSize&gt; computeSnapshotSizes(List&lt;String&gt; snapshots) throws IOException {<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    final List&lt;SnapshotWithSize&gt; snapshotSizes = new ArrayList&lt;&gt;(snapshots.size());<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    final Path rootDir = FSUtils.getRootDir(conf);<a name="line.382"></a>
-<span class="sourceLineNo">383</span><a name="line.383"></a>
-<span class="sourceLineNo">384</span>    // Get the map of store file names to store file path for this table<a name="line.384"></a>
-<span class="sourceLineNo">385</span>    final Set&lt;String&gt; tableReferencedStoreFiles;<a name="line.385"></a>
-<span class="sourceLineNo">386</span>    try {<a name="line.386"></a>
-<span class="sourceLineNo">387</span>      tableReferencedStoreFiles = FSUtils.getTableStoreFilePathMap(fs, rootDir).keySet();<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    } catch (InterruptedException e) {<a name="line.388"></a>
-<span class="sourceLineNo">389</span>      Thread.currentThread().interrupt();<a name="line.389"></a>
-<span class="sourceLineNo">390</span>      return null;<a name="line.390"></a>
-<span class="sourceLineNo">391</span>    }<a name="line.391"></a>
-<span class="sourceLineNo">392</span><a name="line.392"></a>
-<span class="sourceLineNo">393</span>    if (LOG.isTraceEnabled()) {<a name="line.393"></a>
-<span class="sourceLineNo">394</span>      LOG.trace("Paths for " + tn + ": " + tableReferencedStoreFiles);<a name="line.394"></a>
-<span class="sourceLineNo">395</span>    }<a name="line.395"></a>
-<span class="sourceLineNo">396</span><a name="line.396"></a>
-<span class="sourceLineNo">397</span>    // For each snapshot on this table, get the files which the snapshot references which<a name="line.397"></a>
-<span class="sourceLineNo">398</span>    // the table does not.<a name="line.398"></a>
-<span class="sourceLineNo">399</span>    Set&lt;String&gt; snapshotReferencedFiles = new HashSet&lt;&gt;();<a name="line.399"></a>
-<span class="sourceLineNo">400</span>    for (String snapshotName : snapshots) {<a name="line.400"></a>
-<span class="sourceLineNo">401</span>      Path snapshotDir = SnapshotDescriptionUtils.getCompletedSnapshotDir(snapshotName, rootDir);<a name="line.401"></a>
-<span class="sourceLineNo">402</span>      SnapshotDescription sd = SnapshotDescriptionUtils.readSnapshotInfo(fs, snapshotDir);<a name="line.402"></a>
-<span class="sourceLineNo">403</span>      SnapshotManifest manifest = SnapshotManifest.open(conf, fs, snapshotDir, sd);<a name="line.403"></a>
-<span class="sourceLineNo">404</span><a name="line.404"></a>
-<span class="sourceLineNo">405</span>      if (LOG.isTraceEnabled()) {<a name="line.405"></a>
-<span class="sourceLineNo">406</span>        LOG.trace("Files referenced by other snapshots: " + snapshotReferencedFiles);<a name="line.406"></a>
-<span class="sourceLineNo">407</span>      }<a name="line.407"></a>
-<span class="sourceLineNo">408</span><a name="line.408"></a>
-<span class="sourceLineNo">409</span>      // Get the set of files from the manifest that this snapshot references which are not also<a name="line.409"></a>
-<span class="sourceLineNo">410</span>      // referenced by the originating table.<a name="line.410"></a>
-<span class="sourceLineNo">411</span>      Set&lt;StoreFileReference&gt; unreferencedStoreFileNames = getStoreFilesFromSnapshot(<a name="line.411"></a>
-<span class="sourceLineNo">412</span>          manifest, (sfn) -&gt; !tableReferencedStoreFiles.contains(sfn)<a name="line.412"></a>
-<span class="sourceLineNo">413</span>              &amp;&amp; !snapshotReferencedFiles.contains(sfn));<a name="line.413"></a>
-<span class="sourceLineNo">414</span><a name="line.414"></a>
-<span class="sourceLineNo">415</span>      if (LOG.isTraceEnabled()) {<a name="line.415"></a>
-<span class="sourceLineNo">416</span>        LOG.trace("Snapshot " + snapshotName + " solely references the files: "<a name="line.416"></a>
-<span class="sourceLineNo">417</span>            + unreferencedStoreFileNames);<a name="line.417"></a>
-<span class="sourceLineNo">418</span>      }<a name="line.418"></a>
-<span class="sourceLineNo">419</span><a name="line.419"></a>
-<span class="sourceLineNo">420</span>      // Compute the size of the store files for this snapshot<a name="line.420"></a>
-<span class="sourceLineNo">421</span>      long size = getSizeOfStoreFiles(tn, unreferencedStoreFileNames);<a name="line.421"></a>
-<span class="sourceLineNo">422</span>      if (LOG.isTraceEnabled()) {<a name="line.422"></a>
-<span class="sourceLineNo">423</span>        LOG.trace("Computed size of " + snapshotName + " to be " + size);<a name="line.423"></a>
-<span class="sourceLineNo">424</span>      }<a name="line.424"></a>
-<span class="sourceLineNo">425</span><a name="line.425"></a>
-<span class="sourceLineNo">426</span>      // Persist this snapshot's size into the map<a name="line.426"></a>
-<span class="sourceLineNo">427</span>      snapshotSizes.add(new SnapshotWithSize(snapshotName, size));<a name="line.427"></a>
-<span class="sourceLineNo">428</span><a name="line.428"></a>
-<span class="sourceLineNo">429</span>      // Make sure that we don't double-count the same file<a name="line.429"></a>
-<span class="sourceLineNo">430</span>      for (StoreFileReference ref : unreferencedStoreFileNames) {<a name="line.430"></a>
-<span class="sourceLineNo">431</span>        for (String fileNames : ref.getFamilyToFilesMapping().values()) {<a name="line.431"></a>
-<span class="sourceLineNo">432</span>          snapshotReferencedFiles.add(fileNames);<a name="line.432"></a>
-<span class="sourceLineNo">433</span>        }<a name="line.433"></a>
-<span class="sourceLineNo">434</span>      }<a name="line.434"></a>
-<span class="sourceLineNo">435</span>    }<a name="line.435"></a>
-<span class="sourceLineNo">436</span><a name="line.436"></a>
-<span class="sourceLineNo">437</span>    return snapshotSizes;<a name="line.437"></a>
-<span class="sourceLineNo">438</span>  }<a name="line.438"></a>
-<span class="sourceLineNo">439</span><a name="line.439"></a>
-<span class="sourceLineNo">440</span>  /**<a name="line.440"></a>
-<span class="sourceLineNo">441</span>   * Computes the size of each store file in {@code storeFileNames}<a name="line.441"></a>
-<span class="sourceLineNo">442</span>   */<a name="line.442"></a>
-<span class="sourceLineNo">443</span>  long getSizeOfStoreFiles(TableName tn, Set&lt;StoreFileReference&gt; storeFileNames) {<a name="line.443"></a>
-<span class="sourceLineNo">444</span>    return storeFileNames.stream()<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        .collect(Collectors.summingLong((sfr) -&gt; getSizeOfStoreFile(tn, sfr)));<a name="line.445"></a>
-<span class="sourceLineNo">446</span>  }<a name="line.446"></a>
-<span class="sourceLineNo">447</span><a name="line.447"></a>
-<span class="sourceLineNo">448</span>  /**<a name="line.448"></a>
-<span class="sourceLineNo">449</span>   * Computes the size of the store files for a single region.<a name="line.449"></a>
-<span class="sourceLineNo">450</span>   */<a name="line.450"></a>
-<span class="sourceLineNo">451</span>  long getSizeOfStoreFile(TableName tn, StoreFileReference storeFileName) {<a name="line.451"></a>
-<span class="sourceLineNo">452</span>    String regionName = storeFileName.getRegionName();<a name="line.452"></a>
-<span class="sourceLineNo">453</span>    return storeFileName.getFamilyToFilesMapping()<a name="line.453"></a>
-<span class="sourceLineNo">454</span>        .entries().stream()<a name="line.454"></a>
-<span class="sourceLineNo">455</span>        .collect(Collectors.summingLong((e) -&gt;<a name="line.455"></a>
-<span class="sourceLineNo">456</span>            getSizeOfStoreFile(tn, regionName, e.getKey(), e.getValue())));<a name="line.456"></a>
-<span class="sourceLineNo">457</span>  }<a name="line.457"></a>
-<span class="sourceLineNo">458</span><a name="line.458"></a>
-<span class="sourceLineNo">459</span>  /**<a name="line.459"></a>
-<span class="sourceLineNo">460</span>   * Computes the size of the store file given its name, region and family name in<a name="line.460"></a>
-<span class="sourceLineNo">461</span>   * the archive directory.<a name="line.461"></a>
-<span class="sourceLineNo">462</span>   */<a name="line.462"></a>
-<span class="sourceLineNo">463</span>  long getSizeOfStoreFile(<a name="line.463"></a>
-<span class="sourceLineNo">464</span>      TableName tn, String regionName, String family, String storeFile) {<a name="line.464"></a>
-<span class="sourceLineNo">465</span>    Path familyArchivePath;<a name="line.465"></a>
-<span class="sourceLineNo">466</span>    try {<a name="line.466"></a>
-<span class="sourceLineNo">467</span>      familyArchivePath = HFileArchiveUtil.getStoreArchivePath(conf, tn, regionName, family);<a name="line.467"></a>
-<span class="sourceLineNo">468</span>    } catch (IOException e) {<a name="line.468"></a>
-<span class="sourceLineNo">469</span>      LOG.warn("Could not compute path for the archive directory for the region", e);<a name="line.469"></a>
-<span class="sourceLineNo">470</span>      return 0L;<a name="line.470"></a>
-<span class="sourceLineNo">471</span>    }<a name="line.471"></a>
-<span class="sourceLineNo">472</span>    Path fileArchivePath = new Path(familyArchivePath, storeFile);<a name="line.472"></a>
-<span class="sourceLineNo">473</span>    try {<a name="line.473"></a>
-<span class="sourceLineNo">474</span>      if (fs.exists(fileArchivePath)) {<a name="line.474"></a>
-<span class="sourceLineNo">475</span>        FileStatus[] status = fs.listStatus(fileArchivePath);<a name="line.475"></a>
-<span class="sourceLineNo">476</span>        if (1 != status.length) {<a name="line.476"></a>
-<span class="sourceLineNo">477</span>          LOG.warn("Expected " + fileArchivePath +<a name="line.477"></a>
-<span class="sourceLineNo">478</span>              " to be a file but was a directory, ignoring reference");<a name="line.478"></a>
-<span class="sourceLineNo">479</span>          return 0L;<a name="line.479"></a>
-<span class="sourceLineNo">480</span>        }<a name="line.480"></a>
-<span class="sourceLineNo">481</span>        return status[0].getLen();<a name="line.481"></a>
-<span class="sourceLineNo">482</span>      }<a name="line.482"></a>
-<span class="sourceLineNo">483</span>    } catch (IOException e) {<a name="line.483"></a>
-<span class="sourceLineNo">484</span>      LOG.warn("Could not obtain the status of " + fileArchivePath, e);<a name="line.484"></a>
-<span class="sourceLineNo">485</span>      return 0L;<a name="line.485"></a>
-<span class="sourceLineNo">486</span>    }<a name="line.486"></a>
-<span class="sourceLineNo">487</span>    LOG.warn("Expected " + fileArchivePath + " to exist but does not, ignoring reference.");<a name="line.487"></a>
-<span class="sourceLineNo">488</span>    return 0L;<a name="line.488"></a>
-<span class="sourceLineNo">489</span>  }<a name="line.489"></a>
-<span class="sourceLineNo">490</span><a name="line.490"></a>
-<span class="sourceLineNo">491</span>  /**<a name="line.491"></a>
-<span class="sourceLineNo">492</span>   * Extracts the names of the store files referenced by this snapshot which satisfy the given<a name="line.492"></a>
-<span class="sourceLineNo">493</span>   * predicate (the predicate returns {@code true}).<a name="line.493"></a>
-<span class="sourceLineNo">494</span>   */<a name="line.494"></a>
-<span class="sourceLineNo">495</span>  Set&lt;StoreFileReference&gt; getStoreFilesFromSnapshot(<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      SnapshotManifest manifest, Predicate&lt;String&gt; filter) {<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    Set&lt;StoreFileReference&gt; references = new HashSet&lt;&gt;();<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    // For each region referenced by the snapshot<a name="line.498"></a>
-<span class="sourceLineNo">499</span>    for (SnapshotRegionManifest rm : manifest.getRegionManifests()) {<a name="line.499"></a>
-<span class="sourceLineNo">500</span>      StoreFileReference regionReference = new StoreFileReference(<a name="line.500"></a>
-<span class="sourceLineNo">501</span>          ProtobufUtil.toRegionInfo(rm.getRegionInfo()).getEncodedName());<a name="line.501"></a>
-<span class="sourceLineNo">502</span><a name="line.502"></a>
-<span class="sourceLineNo">503</span>      // For each column family in this region<a name="line.503"></a>
-<span class="sourceLineNo">504</span>      for (FamilyFiles ff : rm.getFamilyFilesList()) {<a name="line.504"></a>
-<span class="sourceLineNo">505</span>        final String familyName = ff.getFamilyName().toStringUtf8();<a name="line.505"></a>
-<span class="sourceLineNo">506</span>        // And each store file in that family<a name="line.506"></a>
-<span class="sourceLineNo">507</span>        for (StoreFile sf : ff.getStoreFilesList()) {<a name="line.507"></a>
-<span class="sourceLineNo">508</span>          String storeFileName = sf.getName();<a name="line.508"></a>
-<span class="sourceLineNo">509</span>          // A snapshot only "inherits" a files size if it uniquely refers to it (no table<a name="line.509"></a>
-<span class="sourceLineNo">510</span>          // and no other snapshot references it).<a name="line.510"></a>
-<span class="sourceLineNo">511</span>          if (filter.test(storeFileName)) {<a name="line.511"></a>
-<span class="sourceLineNo">512</span>            regionReference.addFamilyStoreFile(familyName, storeFileName);<a name="line.512"></a>
-<span class="sourceLineNo">513</span>          }<a name="line.513"></a>
-<span class="sourceLineNo">514</span>        }<a name="line.514"></a>
-<span class="sourceLineNo">515</span>      }<a name="line.515"></a>
-<span class="sourceLineNo">516</span>      // Only add this Region reference if we retained any files.<a name="line.516"></a>
-<span class="sourceLineNo">517</span>      if (!regionReference.getFamilyToFilesMapping().isEmpty()) {<a name="line.517"></a>
-<span class="sourceLineNo">518</span>        references.add(regionReference);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>      }<a name="line.519"></a>
-<span class="sourceLineNo">520</span>    }<a name="line.520"></a>
-<span class="sourceLineNo">521</span>    return references;<a name="line.521"></a>
-<span class="sourceLineNo">522</span>  }<a name="line.522"></a>
-<span class="sourceLineNo">523</span><a name="line.523"></a>
-<span class="sourceLineNo">524</span>  /**<a name="line.524"></a>
-<span class="sourceLineNo">525</span>   * Writes the snapshot sizes to the provided {@code table}.<a name="line.525"></a>
-<span class="sourceLineNo">526</span>   */<a name="line.526"></a>
-<span class="sourceLineNo">527</span>  void persistSnapshotSizes(<a name="line.527"></a>
-<span class="sourceLineNo">528</span>      Table table, List&lt;SnapshotWithSize&gt; snapshotSizes) throws IOException {<a name="line.528"></a>
-<span class="sourceLineNo">529</span>    // Convert each entry in the map to a Put and write them to the quota table<a name="line.529"></a>
-<span class="sourceLineNo">530</span>    table.put(snapshotSizes<a name="line.530"></a>
-<span class="sourceLineNo">531</span>        .stream()<a name="line.531"></a>
-<span class="sourceLineNo">532</span>        .map(sws -&gt; QuotaTableUtil.createPutForSnapshotSize(<a name="line.532"></a>
-<span class="sourceLineNo">533</span>            tn, sws.getName(), sws.getSize()))<a name="line.533"></a>
-<span class="sourceLineNo">534</span>        .collect(Collectors.toList()));<a name="line.534"></a>
-<span class="sourceLineNo">535</span>  }<a name="line.535"></a>
-<span class="sourceLineNo">536</span><a name="line.536"></a>
-<span class="sourceLineNo">537</span>  /**<a name="line.537"></a>
-<span class="sourceLineNo">538</span>   * A struct encapsulating the name of a snapshot and its "size" on the filesystem. This size is<a name="line.538"></a>
-<span class="sourceLineNo">539</span>   * defined as the amount of filesystem space taken by the files the snapshot refers to which<a name="line.539"></a>
-<span class="sourceLineNo">540</span>   * the originating table no longer refers to.<a name="line.540"></a>
-<span class="sourceLineNo">541</span>   */<a name="line.541"></a>
-<span class="sourceLineNo">542</span>  static class SnapshotWithSize {<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    private final String name;<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    private final long size;<a name="line.544"></a>
-<span class="sourceLineNo">545</span><a name="line.545"></a>
-<span class="sourceLineNo">546</span>    SnapshotWithSize(String name, long size) {<a name="line.546"></a>
-<span class="sourceLineNo">547</span>      this.name = Objects.requireNonNull(name);<a name="line.547"></a>
-<span class="sourceLineNo">548</span>      this.size = size;<a name="line.548"></a>
-<span class="sourceLineNo">549</span>    }<a name="line.549"></a>
-<span class="sourceLineNo">550</span><a name="line.550"></a>
-<span class="sourceLineNo">551</span>    String getName() {<a name="line.551"></a>
-<span class="sourceLineNo">552</span>      return name;<a name="line.552"></a>
-<span class="sourceLineNo">553</span>    }<a name="line.553"></a>
-<span class="sourceLineNo">554</span><a name="line.554"></a>
-<span class="sourceLineNo">555</span>    long getSize() {<a name="line.555"></a>
-<span class="sourceLineNo">556</span>      return size;<a name="line.556"></a>
-<span class="sourceLineNo">557</span>    }<a name="line.557"></a>
-<span class="sourceLineNo">558</span><a name="line.558"></a>
-<span class="sourceLineNo">559</span>    @Override<a name="line.559"></a>
-<span class="sourceLineNo">560</span>    public int hashCode() {<a name="line.560"></a>
-<span class="sourceLineNo">561</span>      return new HashCodeBuilder().append(name).append(size).toHashCode();<a name="line.561"></a>
-<span class="sourceLineNo">562</span>    }<a name="line.562"></a>
-<span class="sourceLineNo">563</span><a name="line.563"></a>
-<span class="sourceLineNo">564</span>    @Override<a name="line.564"></a>
-<span class="sourceLineNo">565</span>    public boolean equals(Object o) {<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      if (this == o) {<a name="line.566"></a>
-<span class="sourceLineNo">567</span>        return true;<a name="line.567"></a>
-<span class="sourceLineNo">568</span>      }<a name="line.568"></a>
-<span class="sourceLineNo">569</span><a name="line.569"></a>
-<span class="sourceLineNo">570</span>      if (!(o instanceof SnapshotWithSize)) {<a name="line.570"></a>
-<span class="sourceLineNo">571</span>        return false;<a name="line.571"></a>
-<span class="sourceLineNo">572</span>      }<a name="line.572"></a>
-<span class="sourceLineNo">573</span><a name="line.573"></a>
-<span class="sourceLineNo">574</span>      SnapshotWithSize other = (SnapshotWithSize) o;<a name="line.574"></a>
-<span class="sourceLineNo">575</span>      return name.equals(other.name) &amp;&amp; size == other.size;<a name="line.575"></a>
-<span class="sourceLineNo">576</span>    }<a name="line.576"></a>
-<span class="sourceLineNo">577</span><a name="line.577"></a>
-<span class="sourceLineNo">578</span>    @Override<a name="line.578"></a>
-<span class="sourceLineNo">579</span>    public String toString() {<a name="line.579"></a>
-<span class="sourceLineNo">580</span>      StringBuilder sb = new StringBuilder(32);<a name="line.580"></a>
-<span class="sourceLineNo">581</span>      return sb.append("SnapshotWithSize:[").append(name).append(" ")<a name="line.581"></a>
-<span class="sourceLineNo">582</span>          .append(StringUtils.byteDesc(size)).append("]").toString();<a name="line.582"></a>
-<span class="sourceLineNo">583</span>    }<a name="line.583"></a>
-<span class="sourceLineNo">584</span>  }<a name="line.584"></a>
-<span class="sourceLineNo">585</span><a name="line.585"></a>
-<span class="sourceLineNo">586</span>  /**<a name="line.586"></a>
-<span class="sourceLineNo">587</span>   * A reference to a collection of files in the archive directory for a single region.<a name="line.587"></a>
-<span class="sourceLineNo">588</span>   */<a name="line.588"></a>
-<span class="sourceLineNo">589</span>  static class StoreFileReference {<a name="line.589"></a>
-<span class="sourceLineNo">590</span>    private final String regionName;<a name="line.590"></a>
-<span class="sourceLineNo">591</span>    private final Multimap&lt;String,String&gt; familyToFiles;<a name="line.591"></a>
-<span class="sourceLineNo">592</span><a name="line.592"></a>
-<span class="sourceLineNo">593</span>    StoreFileReference(String regionName) {<a name="line.593"></a>
-<span class="sourceLineNo">594</span>      this.regionName = Objects.requireNonNull(regionName);<a name="line.594"></a>
-<span class="sourceLineNo">595</span>      familyToFiles = HashMultimap.create();<a name="line.595"></a>
-<span class="sourceLineNo">596</span>    }<a name="line.596"></a>
-<span class="sourceLineNo">597</span><a name="line.597"></a>
-<span class="sourceLineNo">598</span>    String getRegionName() {<a name="line.598"></a>
-<span class="sourceLineNo">599</span>      return regionName;<a name="line.599"></a>
-<span class="sourceLineNo">600</span>    }<a name="line.600"></a>
-<span class="sourceLineNo">601</span><a name="line.601"></a>
-<span class="sourceLineNo">602</span>    Multimap&lt;String,String&gt; getFamilyToFilesMapping() {<a name="line.602"></a>
-<span class="sourceLineNo">603</span>      return familyToFiles;<a name="line.603"></a>
-<span class="sourceLineNo">604</span>    }<a name="line.604"></a>
-<span class="sourceLineNo">605</span><a name="line.605"></a>
-<span class="sourceLineNo">606</span>    void addFamilyStoreFile(String family, String storeFileName) {<a name="line.606"></a>
-<span class="sourceLineNo">607</span>      familyToFiles.put(family, storeFileName);<a name="line.607"></a>
-<span class="sourceLineNo">608</span>    }<a name="line.608"></a>
-<span class="sourceLineNo">609</span><a name="line.609"></a>
-<span class="sourceLineNo">610</span>    @Override<a name="line.610"></a>
-<span class="sourceLineNo">611</span>    public int hashCode() {<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      return new HashCodeBuilder().append(regionName).append(familyToFiles).toHashCode();<a name="line.612"></a>
-<span class="sourceLineNo">613</span>    }<a name="line.613"></a>
-<span class="sourceLineNo">614</span><a name="line.614"></a>
-<span class="sourceLineNo">615</span>    @Override<a name="line.615"></a>
-<span class="sourceLineNo">616</span>    public boolean equals(Object o) {<a name="line.616"></a>
-<span class="sourceLineNo">617</span>      if (this == o) {<a name="line.617"></a>
-<span class="sourceLineNo">618</span>        return true;<a name="line.618"></a>
-<span class="sourceLineNo">619</span>      }<a name="line.619"></a>
-<span class="sourceLineNo">620</span>      if (!(o instanceof StoreFileReference)) {<a name="line.620"></a>
-<span class="sourceLineNo">621</span>        return false;<a name="line.621"></a>
-<span class="sourceLineNo">622</span>      }<a name="line.622"></a>
-<span class="sourceLineNo">623</span>      StoreFileReference other = (StoreFileReference) o;<a name="line.623"></a>
-<span class="sourceLineNo">624</span>      return regionName.equals(other.regionName) &amp;&amp; familyToFiles.equals(other.familyToFiles);<a name="line.624"></a>
-<span class="sourceLineNo">625</span>    }<a name="line.625"></a>
-<span class="sourceLineNo">626</span><a name="line.626"></a>
-<span class="sourceLineNo">627</span>    @Override<a name="line.627"></a>
-<span class="sourceLineNo">628</span>    public String toString() {<a name="line.628"></a>
-<span class="sourceLineNo">629</span>      StringBuilder sb = new StringBuilder();<a name="line.629"></a>
-<span class="sourceLineNo">630</span>      return sb.append("StoreFileReference[region=").append(regionName).append(", files=")<a name="line.630"></a>
-<span class="sourceLineNo">631</span>          .append(familyToFiles).append("]").toString();<a name="line.631"></a>
-<span class="sourceLineNo">632</span>    }<a name="line.632"></a>
-<span class="sourceLineNo">633</span>  }<a name="line.633"></a>
-<span class="sourceLineNo">634</span>}<a name="line.634"></a>
+<span class="sourceLineNo">037</span>import org.apache.commons.lang3.builder.HashCodeBuilder;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.conf.Configuration;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.hadoop.fs.FileStatus;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.apache.hadoop.fs.FileSystem;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.fs.Path;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.TableName;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.client.Connection;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.client.Get;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.client.Put;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.client.Result;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.client.Table;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.util.HFileArchiveUtil;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.util.StringUtils;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.53"></a>
+<span class="sourceLineNo">054</span><a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.slf4j.Logger;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.slf4j.LoggerFactory;<a name="line.56"></a>
+<span class="sourceLineNo">057</span><a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hbase.thirdparty.com.google.common.collect.HashMultimap;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;<a name="line.60"></a>
+<span class="sourceLineNo">061</span><a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile;<a name="line.66"></a>
+<span class="sourceLineNo">067</span><a name="line.67"></a>
+<span class="sourceLineNo">068</span>/**<a name="line.68"></a>
+<span class="sourceLineNo">069</span> * Tracks file archiving and updates the hbase quota table.<a name="line.69"></a>
+<span class="sourceLineNo">070</span> */<a name="line.70"></a>
+<span class="sourceLineNo">071</span>@InterfaceAudience.Private<a name="line.71"></a>
+<span class="sourceLineNo">072</span>public class FileArchiverNotifierImpl implements FileArchiverNotifier {<a name="line.72"></a>
+<span class="sourceLineNo">073</span>  private static final Logger LOG = LoggerFactory.getLogger(FileArchiverNotifierImpl.class);<a name="line.73"></a>
+<span class="sourceLineNo">074</span>  private final Connection conn;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>  private final Configuration conf;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>  private final FileSystem fs;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>  private final TableName tn;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>  private final ReadLock readLock;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>  private final WriteLock writeLock;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>  private volatile long lastFullCompute = Long.MIN_VALUE;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>  private List&lt;String&gt; currentSnapshots = Collections.emptyList();<a name="line.81"></a>
+<span class="sourceLineNo">082</span>  private static final Map&lt;String,Object&gt; NAMESPACE_LOCKS = new HashMap&lt;&gt;();<a name="line.82"></a>
+<span class="sourceLineNo">083</span><a name="line.83"></a>
+<span class="sourceLineNo">084</span>  /**<a name="line.84"></a>
+<span class="sourceLineNo">085</span>   * An Exception thrown when SnapshotSize updates to hbase:quota fail to be written.<a name="line.85"></a>
+<span class="sourceLineNo">086</span>   */<a name="line.86"></a>
+<span class="sourceLineNo">087</span>  @InterfaceAudience.Private<a name="line.87"></a>
+<span class="sourceLineNo">088</span>  public static class QuotaSnapshotSizeSerializationException extends IOException {<a name="line.88"></a>
+<span class="sourceLineNo">089</span>    private static final long serialVersionUID = 1L;<a name="line.89"></a>
+<span class="sourceLineNo">090</span><a name="line.90"></a>
+<span class="sourceLineNo">091</span>    public QuotaSnapshotSizeSerializationException(String msg) {<a name="line.91"></a>
+<span class="sourceLineNo">092</span>      super(msg);<a name="line.92"></a>
+<span class="sourceLineNo">093</span>    }<a name="line.93"></a>
+<span class="sourceLineNo">094</span>  }<a name="line.94"></a>
+<span class="sourceLineNo">095</span><a name="line.95"></a>
+<span class="sourceLineNo">096</span>  public FileArchiverNotifierImpl(<a name="line.96"></a>
+<span class="sourceLineNo">097</span>      Connection conn, Configuration conf, FileSystem fs, TableName tn) {<a name="line.97"></a>
+<span class="sourceLineNo">098</span>    this.conn = conn;<a name="line.98"></a>
+<span class="sourceLineNo">099</span>    this.conf = conf;<a name="line.99"></a>
+<span class="sourceLineNo">100</span>    this.fs = fs;<a name="line.100"></a>
+<span class="sourceLineNo">101</span>    this.tn = tn;<a name="line.101"></a>
+<span class="sourceLineNo">102</span>    ReentrantReadWriteLock lock = new ReentrantReadWriteLock();<a name="line.102"></a>
+<span class="sourceLineNo">103</span>    readLock = lock.readLock();<a name="line.103"></a>
+<span class="sourceLineNo">104</span>    writeLock = lock.writeLock();<a name="line.104"></a>
+<span class="sourceLineNo">105</span>  }<a name="line.105"></a>
+<span class="sourceLineNo">106</span><a name="line.106"></a>
+<span class="sourceLineNo">107</span>  static synchronized Object getLockForNamespace(String namespace) {<a name="line.107"></a>
+<span class="sourceLineNo">108</span>    return NAMESPACE_LOCKS.computeIfAbsent(namespace, (ns) -&gt; new Object());<a name="line.108"></a>
+<span class="sourceLineNo">109</span>  }<a name="line.109"></a>
+<span class="sourceLineNo">110</span><a name="line.110"></a>
+<span class="sourceLineNo">111</span>  /**<a name="line.111"></a>
+<span class="sourceLineNo">112</span>   * Returns a strictly-increasing measure of time extracted by {@link System#nanoTime()}.<a name="line.112"></a>
+<span class="sourceLineNo">113</span>   */<a name="line.113"></a>
+<span class="sourceLineNo">114</span>  long getLastFullCompute() {<a name="line.114"></a>
+<span class="sourceLineNo">115</span>    return lastFullCompute;<a name="line.115"></a>
+<span class="sourceLineNo">116</span>  }<a name="line.116"></a>
+<span class="sourceLineNo">117</span><a name="line.117"></a>
+<span class="sourceLineNo">118</span>  @Override<a name="line.118"></a>
+<span class="sourceLineNo">119</span>  public void addArchivedFiles(Set&lt;Entry&lt;String, Long&gt;&gt; fileSizes) throws IOException {<a name="line.119"></a>
+<span class="sourceLineNo">120</span>    long start = System.nanoTime();<a name="line.120"></a>
+<span class="sourceLineNo">121</span>    readLock.lock();<a name="line.121"></a>
+<span class="sourceLineNo">122</span>    try {<a name="line.122"></a>
+<span class="sourceLineNo">123</span>      // We want to catch the case where we got an archival request, but there was a full<a name="line.123"></a>
+<span class="sourceLineNo">124</span>      // re-computation in progress that was blocking us. Most likely, the full computation is going<a name="line.124"></a>
+<span class="sourceLineNo">125</span>      // to already include the changes we were going to make.<a name="line.125"></a>
+<span class="sourceLineNo">126</span>      //<a name="line.126"></a>
+<span class="sourceLineNo">127</span>      // Same as "start &lt; lastFullCompute" but avoiding numeric overflow per the<a name="line.127"></a>
+<span class="sourceLineNo">128</span>      // System.nanoTime() javadoc<a name="line.128"></a>
+<span class="sourceLineNo">129</span>      if (lastFullCompute != Long.MIN_VALUE &amp;&amp; start - lastFullCompute &lt; 0) {<a name="line.129"></a>
+<span class="sourceLineNo">130</span>        if (LOG.isTraceEnabled()) {<a name="line.130"></a>
+<span class="sourceLineNo">131</span>          LOG.trace("A full computation was performed after this request was received."<a name="line.131"></a>
+<span class="sourceLineNo">132</span>              + " Ignoring requested updates: " + fileSizes);<a name="line.132"></a>
+<span class="sourceLineNo">133</span>        }<a name="line.133"></a>
+<span class="sourceLineNo">134</span>        return;<a name="line.134"></a>
+<span class="sourceLineNo">135</span>      }<a name="line.135"></a>
+<span class="sourceLineNo">136</span><a name="line.136"></a>
+<span class="sourceLineNo">137</span>      if (LOG.isTraceEnabled()) {<a name="line.137"></a>
+<span class="sourceLineNo">138</span>        LOG.trace("currentSnapshots: " + currentSnapshots + " fileSize: "+ fileSizes);<a name="line.138"></a>
+<span class="sourceLineNo">139</span>      }<a name="line.139"></a>
+<span class="sourceLineNo">140</span><a name="line.140"></a>
+<span class="sourceLineNo">141</span>      // Write increment to quota table for the correct snapshot. Only do this if we have snapshots<a name="line.141"></a>
+<span class="sourceLineNo">142</span>      // and some files that were archived.<a name="line.142"></a>
+<span class="sourceLineNo">143</span>      if (!currentSnapshots.isEmpty() &amp;&amp; !fileSizes.isEmpty()) {<a name="line.143"></a>
+<span class="sourceLineNo">144</span>        // We get back the files which no snapshot referenced (the files which will be deleted soon)<a name="line.144"></a>
+<span class="sourceLineNo">145</span>        groupArchivedFiledBySnapshotAndRecordSize(currentSnapshots, fileSizes);<a name="line.145"></a>
+<span class="sourceLineNo">146</span>      }<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    } finally {<a name="line.147"></a>
+<span class="sourceLineNo">148</span>      readLock.unlock();<a name="line.148"></a>
+<span class="sourceLineNo">149</span>    }<a name="line.149"></a>
+<span class="sourceLineNo">150</span>  }<a name="line.150"></a>
+<span class="sourceLineNo">151</span><a name="line.151"></a>
+<span class="sourceLineNo">152</span>  /**<a name="line.152"></a>
+<span class="sourceLineNo">153</span>   * For each file in the map, this updates the first snapshot (lexicographic snapshot name) that<a name="line.153"></a>
+<span class="sourceLineNo">154</span>   * references this file. The result of this computation is serialized to the quota table.<a name="line.154"></a>
+<span class="sourceLineNo">155</span>   *<a name="line.155"></a>
+<span class="sourceLineNo">156</span>   * @param snapshots A collection of HBase snapshots to group the files into<a name="line.156"></a>
+<span class="sourceLineNo">157</span>   * @param fileSizes A map of file names to their sizes<a name="line.157"></a>
+<span class="sourceLineNo">158</span>   */<a name="line.158"></a>
+<span class="sourceLineNo">159</span>  void groupArchivedFiledBySnapshotAndRecordSize(<a name="line.159"></a>
+<span class="sourceLineNo">160</span>      List&lt;String&gt; snapshots, Set&lt;Entry&lt;String, Long&gt;&gt; fileSizes) throws IOException {<a name="line.160"></a>
+<span class="sourceLineNo">161</span>    // Make a copy as we'll modify it.<a name="line.161"></a>
+<span class="sourceLineNo">162</span>    final Map&lt;String,Long&gt; filesToUpdate = new HashMap&lt;&gt;(fileSizes.size());<a name="line.162"></a>
+<span class="sourceLineNo">163</span>    for (Entry&lt;String,Long&gt; entry : fileSizes) {<a name="line.163"></a>
+<span class="sourceLineNo">164</span>      filesToUpdate.put(entry.getKey(), entry.getValue());<a name="line.164"></a>
+<span class="sourceLineNo">165</span>    }<a name="line.165"></a>
+<span class="sourceLineNo">166</span>    // Track the change in size to each snapshot<a name="line.166"></a>
+<span class="sourceLineNo">167</span>    final Map&lt;String,Long&gt; snapshotSizeChanges = new HashMap&lt;&gt;();<a name="line.167"></a>
+<span class="sourceLineNo">168</span>    for (String snapshot : snapshots) {<a name="line.168"></a>
+<span class="sourceLineNo">169</span>      // For each file in `filesToUpdate`, check if `snapshot` refers to it.<a name="line.169"></a>
+<span class="sourceLineNo">170</span>      // If `snapshot` does, remove it from `filesToUpdate` and add it to `snapshotSizeChanges`.<a name="line.170"></a>
+<span class="sourceLineNo">171</span>      bucketFilesToSnapshot(snapshot, filesToUpdate, snapshotSizeChanges);<a name="line.171"></a>
+<span class="sourceLineNo">172</span>      if (filesToUpdate.isEmpty()) {<a name="line.172"></a>
+<span class="sourceLineNo">173</span>        // If we have no more files recently archived, we have nothing more to check<a name="line.173"></a>
+<span class="sourceLineNo">174</span>        break;<a name="line.174"></a>
+<span class="sourceLineNo">175</span>      }<a name="line.175"></a>
+<span class="sourceLineNo">176</span>    }<a name="line.176"></a>
+<span class="sourceLineNo">177</span>    // We have computed changes to the snapshot size, we need to record them.<a name="line.177"></a>
+<span class="sourceLineNo">178</span>    if (!snapshotSizeChanges.isEmpty()) {<a name="line.178"></a>
+<span class="sourceLineNo">179</span>      if (LOG.isTraceEnabled()) {<a name="line.179"></a>
+<span class="sourceLineNo">180</span>        LOG.trace("Writing snapshot size changes for: " + snapshotSizeChanges);<a name="line.180"></a>
+<span class="sourceLineNo">181</span>      }<a name="line.181"></a>
+<span class="sourceLineNo">182</span>      persistSnapshotSizeChanges(snapshotSizeChanges);<a name="line.182"></a>
+<span class="sourceLineNo">183</span>    }<a name="line.183"></a>
+<span class="sourceLineNo">184</span>  }<a name="line.184"></a>
+<span class="sourceLineNo">185</span><a name="line.185"></a>
+<span class="sourceLineNo">186</span>  /**<a name="line.186"></a>
+<span class="sourceLineNo">187</span>   * For the given snapshot, find all files which this {@code snapshotName} references. After a file<a name="line.187"></a>
+<span class="sourceLineNo">188</span>   * is found to be referenced by the snapshot, it is removed from {@code filesToUpdate} and<a name="line.188"></a>
+<span class="sourceLineNo">189</span>   * {@code snapshotSizeChanges} is updated in concert.<a name="line.189"></a>
+<span class="sourceLineNo">190</span>   *<a name="line.190"></a>
+<span class="sourceLineNo">191</span>   * @param snapshotName The snapshot to check<a name="line.191"></a>
+<span class="sourceLineNo">192</span>   * @param filesToUpdate A mapping of archived files to their size<a name="line.192"></a>
+<span class="sourceLineNo">193</span>   * @param snapshotSizeChanges A mapping of snapshots and their change in size<a name="line.193"></a>
+<span class="sourceLineNo">194</span>   */<a name="line.194"></a>
+<span class="sourceLineNo">195</span>  void bucketFilesToSnapshot(<a name="line.195"></a>
+<span class="sourceLineNo">196</span>      String snapshotName, Map&lt;String,Long&gt; filesToUpdate, Map&lt;String,Long&gt; snapshotSizeChanges)<a name="line.196"></a>
+<span class="sourceLineNo">197</span>          throws IOException {<a name="line.197"></a>
+<span class="sourceLineNo">198</span>    // A quick check to avoid doing work if the caller unnecessarily invoked this method.<a name="line.198"></a>
+<span class="sourceLineNo">199</span>    if (filesToUpdate.isEmpty()) {<a name="line.199"></a>
+<span class="sourceLineNo">200</span>      return;<a name="line.200"></a>
+<span class="sourceLineNo">201</span>    }<a name="line.201"></a>
+<span class="sourceLineNo">202</span><a name="line.202"></a>
+<span class="sourceLineNo">203</span>    Path snapshotDir = SnapshotDescriptionUtils.getCompletedSnapshotDir(<a name="line.203"></a>
+<span class="sourceLineNo">204</span>        snapshotName, FSUtils.getRootDir(conf));<a name="line.204"></a>
+<span class="sourceLineNo">205</span>    SnapshotDescription sd = SnapshotDescriptionUtils.readSnapshotInfo(fs, snapshotDir);<a name="line.205"></a>
+<span class="sourceLineNo">206</span>    SnapshotManifest manifest = SnapshotManifest.open(conf, fs, snapshotDir, sd);<a name="line.206"></a>
+<span class="sourceLineNo">207</span>    // For each region referenced by the snapshot<a name="line.207"></a>
+<span class="sourceLineNo">208</span>    for (SnapshotRegionManifest rm : manifest.getRegionManifests()) {<a name="line.208"></a>
+<span class="sourceLineNo">209</span>      // For each column family in this region<a name="line.209"></a>
+<span class="sourceLineNo">210</span>      for (FamilyFiles ff : rm.getFamilyFilesList()) {<a name="line.210"></a>
+<span class="sourceLineNo">211</span>        // And each store file in that family<a name="line.211"></a>
+<span class="sourceLineNo">212</span>        for (StoreFile sf : ff.getStoreFilesList()) {<a name="line.212"></a>
+<span class="sourceLineNo">213</span>          Long valueOrNull = filesToUpdate.remove(sf.getName());<a name="line.213"></a>
+<span class="sourceLineNo">214</span>          if (valueOrNull != null) {<a name="line.214"></a>
+<span class="sourceLineNo">215</span>            // This storefile was recently archived, we should update this snapshot with its size<a name="line.215"></a>
+<span class="sourceLineNo">216</span>            snapshotSizeChanges.merge(snapshotName, valueOrNull, Long::sum);<a name="line.216"></a>
+<span class="sourceLineNo">217</span>          }<a name="line.217"></a>
+<span class="sourceLineNo">218</span>          // Short-circuit, if we have no more files that were archived, we don't need to iterate<a name="line.218"></a>
+<span class="sourceLineNo">219</span>          // over the rest of the snapshot.<a name="line.219"></a>
+<span class="sourceLineNo">220</span>          if (filesToUpdate.isEmpty()) {<a name="line.220"></a>
+<span class="sourceLineNo">221</span>            return;<a name="line.221"></a>
+<span class="sourceLineNo">222</span>          }<a name="line.222"></a>
+<span class="sourceLineNo">223</span>        }<a name="line.223"></a>
+<span class="sourceLineNo">224</span>      }<a name="line.224"></a>
+<span class="sourceLineNo">225</span>    }<a name="line.225"></a>
+<span class="sourceLineNo">226</span>  }<a name="line.226"></a>
+<span class="sourceLineNo">227</span><a name="line.227"></a>
+<span class="sourceLineNo">228</span>  /**<a name="line.228"></a>
+<span class="sourceLineNo">229</span>   * Reads the current size for each snapshot to update, generates a new update based on that value,<a name="line.229"></a>
+<span class="sourceLineNo">230</span>   * and then writes the new update.<a name="line.230"></a>
+<span class="sourceLineNo">231</span>   *<a name="line.231"></a>
+<span class="sourceLineNo">232</span>   * @param snapshotSizeChanges A map of snapshot name to size change<a name="line.232"></a>
+<span class="sourceLineNo">233</span>   */<a name="line.233"></a>
+<span class="sourceLineNo">234</span>  void persistSnapshotSizeChanges(Map&lt;String,Long&gt; snapshotSizeChanges) throws IOException {<a name="line.234"></a>
+<span class="sourceLineNo">235</span>    try (Table quotaTable = conn.getTable(QuotaTableUtil.QUOTA_TABLE_NAME)) {<a name="line.235"></a>
+<span class="sourceLineNo">236</span>      // Create a list (with a more typical ordering implied)<a name="line.236"></a>
+<span class="sourceLineNo">237</span>      final List&lt;Entry&lt;String,Long&gt;&gt; snapshotSizeEntries = new ArrayList&lt;&gt;(<a name="line.237"></a>
+<span class="sourceLineNo">238</span>          snapshotSizeChanges.entrySet());<a name="line.238"></a>
+<span class="sourceLineNo">239</span>      // Create the Gets for each snapshot we need to update<a name="line.239"></a>
+<span class="sourceLineNo">240</span>      final List&lt;Get&gt; snapshotSizeGets = snapshotSizeEntries.stream()<a name="line.240"></a>
+<span class="sourceLineNo">241</span>          .map((e) -&gt; QuotaTableUtil.makeGetForSnapshotSize(tn, e.getKey()))<a name="line.241"></a>
+<span class="sourceLineNo">242</span>          .collect(Collectors.toList());<a name="line.242"></a>
+<span class="sourceLineNo">243</span>      final Iterator&lt;Entry&lt;String,Long&gt;&gt; iterator = snapshotSizeEntries.iterator();<a name="line.243"></a>
+<span class="sourceLineNo">244</span>      // A List to store each Put we'll create from the Get's we retrieve<a name="line.244"></a>
+<span class="sourceLineNo">245</span>      final List&lt;Put&gt; updates = new ArrayList&lt;&gt;(snapshotSizeEntries.size());<a name="line.245"></a>
+<span class="sourceLineNo">246</span><a name="line.246"></a>
+<span class="sourceLineNo">247</span>      // TODO Push this down to the RegionServer with a coprocessor:<a name="line.247"></a>
+<span class="sourceLineNo">248</span>      //<a name="line.248"></a>
+<span class="sourceLineNo">249</span>      // We would really like to piggy-back on the row-lock already being grabbed<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      // to handle the update of the row in the quota table. However, because the value<a name="line.250"></a>
+<span class="sourceLineNo">251</span>      // is a serialized protobuf, the standard Increment API doesn't work for us. With a CP, we<a name="line.251"></a>
+<span class="sourceLineNo">252</span>      // can just send the size deltas to the RS and atomically update the serialized PB object<a name="line.252"></a>
+<span class="sourceLineNo">253</span>      // while relying on the row-lock for synchronization.<a name="line.253"></a>
+<span class="sourceLineNo">254</span>      //<a name="line.254"></a>
+<span class="sourceLineNo">255</span>      // Synchronizing on the namespace string is a "minor smell" but passable as this is<a name="line.255"></a>
+<span class="sourceLineNo">256</span>      // only invoked via a single caller (the active Master). Using the namespace name lets us<a name="line.256"></a>
+<span class="sourceLineNo">257</span>      // have some parallelism without worry of on caller seeing stale data from the quota table.<a name="line.257"></a>
+<span class="sourceLineNo">258</span>      synchronized (getLockForNamespace(tn.getNamespaceAsString())) {<a name="line.258"></a>
+<span class="sourceLineNo">259</span>        final Result[] existingSnapshotSizes = quotaTable.get(snapshotSizeGets);<a name="line.259"></a>
+<span class="sourceLineNo">260</span>        long totalSizeChange = 0;<a name="line.260"></a>
+<span class="sourceLineNo">261</span>        // Read the current size values (if they exist) to generate the new value<a name="line.261"></a>
+<span class="sourceLineNo">262</span>        for (Result result : existingSnapshotSizes) {<a name="line.262"></a>
+<span class="sourceLineNo">263</span>          Entry&lt;String,Long&gt; entry = iterator.next();<a name="line.263"></a>
+<span class="sourceLineNo">264</span>          String snapshot = entry.getKey();<a name="line.264"></a>
+<span class="sourceLineNo">265</span>          Long size = entry.getValue();<a name="line.265"></a>
+<span class="sourceLineNo">266</span>          // Track the total size change for the namespace this table belongs in<a name="line.266"></a>
+<span class="sourceLineNo">267</span>          totalSizeChange += size;<a name="line.267"></a>
+<span class="sourceLineNo">268</span>          // Get the size of the previous value (or zero)<a name="line.268"></a>
+<span class="sourceLineNo">269</span>          long previousSize = getSnapshotSizeFromResult(result);<a name="line.269"></a>
+<span class="sourceLineNo">270</span>          // Create an update. A file was archived from the table, so the table's size goes<a name="line.270"></a>
+<span class="sourceLineNo">271</span>          // down, but the snapshot's size goes up.<a name="line.271"></a>
+<span class="sourceLineNo">272</span>          updates.add(QuotaTableUtil.createPutForSnapshotSize(tn, snapshot, previousSize + size));<a name="line.272"></a>
+<span class="sourceLineNo">273</span>        }<a name="line.273"></a>
+<span class="sourceLineNo">274</span><a name="line.274"></a>
+<span class="sourceLineNo">275</span>        // Create an update for the summation of all snapshots in the namespace<a name="line.275"></a>
+<span class="sourceLineNo">276</span>        if (totalSizeChange != 0) {<a name="line.276"></a>
+<span class="sourceLineNo">277</span>          long previousSize = getPreviousNamespaceSnapshotSize(<a name="line.277"></a>
+<span class="sourceLineNo">278</span>              quotaTable, tn.getNamespaceAsString());<a name="line.278"></a>
+<span class="sourceLineNo">279</span>          updates.add(QuotaTableUtil.createPutForNamespaceSnapshotSize(<a name="line.279"></a>
+<span class="sourceLineNo">280</span>              tn.getNamespaceAsString(), previousSize + totalSizeChange));<a name="line.280"></a>
+<span class="sourceLineNo">281</span>        }<a name="line.281"></a>
+<span class="sourceLineNo">282</span><a name="line.282"></a>
+<span class="sourceLineNo">283</span>        // Send all of the quota table updates in one batch.<a name="line.283"></a>
+<span class="sourceLineNo">284</span>        List&lt;Object&gt; failures = new ArrayList&lt;&gt;();<a name="line.284"></a>
+<span class="sourceLineNo">285</span>        final Object[] results = new Object[updates.size()];<a name="line.285"></a>
+<span class="sourceLineNo">286</span>        quotaTable.batch(updates, results);<a name="line.286"></a>
+<span class="sourceLineNo">287</span>        for (Object result : results) {<a name="line.287"></a>
+<span class="sourceLineNo">288</span>          // A null result is an error condition (all RPC attempts failed)<a name="line.288"></a>
+<span class="sourceLineNo">289</span>          if (!(result instanceof Result)) {<a name="line.289"></a>
+<span class="sourceLineNo">290</span>            failures.add(result);<a name="line.290"></a>
+<span class="sourceLineNo">291</span>          }<a name="line.291"></a>
+<span class="sourceLineNo">292</span>        }<a name="line.292"></a>
+<span class="sourceLineNo">293</span>        // Propagate a failure if any updates failed<a name="line.293"></a>
+<span class="sourceLineNo">294</span>        if (!failures.isEmpty()) {<a name="line.294"></a>
+<span class="sourceLineNo">295</span>          throw new QuotaSnapshotSizeSerializationException(<a name="line.295"></a>
+<span class="sourceLineNo">296</span>              "Failed to write some snapshot size updates: " + failures);<a name="lin

<TRUNCATED>
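[The tail of persistSnapshotSizeChanges is cut off above, but the surviving comments describe a standard HBase pattern: submit all Puts with Table.batch(...), then treat any slot of the results array that is not a Result as a failed mutation (a null slot means every RPC retry was exhausted). A helper-method sketch of that pattern; the method name writeAll is illustrative, not from the source:

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.Table;

    static void writeAll(Table table, List<Put> updates) throws IOException {
      Object[] results = new Object[updates.size()];
      try {
        table.batch(updates, results);
      } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        throw new IOException("Interrupted while writing snapshot sizes", e);
      }
      List<Object> failures = new ArrayList<>();
      for (Object r : results) {
        // null (all retries exhausted) or a Throwable both mark a failed Put
        if (!(r instanceof Result)) {
          failures.add(r);
        }
      }
      if (!failures.isEmpty()) {
        throw new IOException("Failed to write snapshot size updates: " + failures);
      }
    }
]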

[13/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.html
index 7137829..4a879bb 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.html
@@ -78,124 +78,124 @@
 <span class="sourceLineNo">070</span>import java.util.concurrent.locks.ReadWriteLock;<a name="line.70"></a>
 <span class="sourceLineNo">071</span>import java.util.concurrent.locks.ReentrantReadWriteLock;<a name="line.71"></a>
 <span class="sourceLineNo">072</span>import java.util.function.Function;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.commons.collections.CollectionUtils;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.conf.Configuration;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.fs.FileStatus;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.fs.FileSystem;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.fs.LocatedFileStatus;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.fs.Path;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.Cell;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.CellBuilderType;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.CellComparatorImpl;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.CompoundConfiguration;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.HConstants;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.HConstants.OperationStatusCode;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.92"></a>
-<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.93"></a>
-<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.NamespaceDescriptor;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.97"></a>
-<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.98"></a>
-<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.TableName;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.Tag;<a name="line.100"></a>
-<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.TagUtil;<a name="line.101"></a>
-<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.102"></a>
-<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.client.Append;<a name="line.103"></a>
-<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;<a name="line.104"></a>
-<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.client.CompactionState;<a name="line.105"></a>
-<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.106"></a>
-<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.107"></a>
-<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.client.Get;<a name="line.108"></a>
-<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.109"></a>
-<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.client.IsolationLevel;<a name="line.110"></a>
-<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.111"></a>
-<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;<a name="line.112"></a>
-<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.client.Put;<a name="line.113"></a>
-<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.client.RegionInfoBuilder;<a name="line.115"></a>
-<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.client.Result;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.118"></a>
-<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.119"></a>
-<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.120"></a>
-<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.client.TableDescriptorBuilder;<a name="line.121"></a>
-<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.conf.ConfigurationManager;<a name="line.122"></a>
-<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;<a name="line.123"></a>
-<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;<a name="line.124"></a>
-<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;<a name="line.125"></a>
-<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.126"></a>
-<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.exceptions.TimeoutIOException;<a name="line.127"></a>
-<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.128"></a>
-<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.129"></a>
-<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.filter.FilterWrapper;<a name="line.130"></a>
-<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.filter.IncompatibleFilterException;<a name="line.131"></a>
-<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.132"></a>
-<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.133"></a>
-<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.io.hfile.HFile;<a name="line.135"></a>
-<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.ipc.RpcCall;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.hadoop.hbase.regionserver.throttle.StoreHotnessProtector;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.apache.hadoop.hbase.regionserver.wal.WALUtil;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>import org.apache.hadoop.hbase.security.User;<a name="line.153"></a>
-<span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.154"></a>
-<span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>import org.apache.hadoop.hbase.trace.TraceUtil;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.158"></a>
-<span class="sourceLineNo">159</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>import org.apache.hadoop.hbase.util.CompressionTest;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>import org.apache.hadoop.hbase.util.EncryptionTest;<a name="line.161"></a>
-<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.162"></a>
-<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.util.HashedBytes;<a name="line.164"></a>
-<span class="sourceLineNo">165</span>import org.apache.hadoop.hbase.util.NonceKey;<a name="line.165"></a>
-<span class="sourceLineNo">166</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.166"></a>
-<span class="sourceLineNo">167</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.167"></a>
-<span class="sourceLineNo">168</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.168"></a>
-<span class="sourceLineNo">169</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.169"></a>
-<span class="sourceLineNo">170</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.170"></a>
-<span class="sourceLineNo">171</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.171"></a>
-<span class="sourceLineNo">172</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.172"></a>
-<span class="sourceLineNo">173</span>import org.apache.hadoop.hbase.wal.WALKeyImpl;<a name="line.173"></a>
-<span class="sourceLineNo">174</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.174"></a>
-<span class="sourceLineNo">175</span>import org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;<a name="line.175"></a>
-<span class="sourceLineNo">176</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.176"></a>
-<span class="sourceLineNo">177</span>import org.apache.hadoop.util.StringUtils;<a name="line.177"></a>
-<span class="sourceLineNo">178</span>import org.apache.htrace.core.TraceScope;<a name="line.178"></a>
-<span class="sourceLineNo">179</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.179"></a>
-<span class="sourceLineNo">180</span>import org.slf4j.Logger;<a name="line.180"></a>
-<span class="sourceLineNo">181</span>import org.slf4j.LoggerFactory;<a name="line.181"></a>
-<span class="sourceLineNo">182</span><a name="line.182"></a>
-<span class="sourceLineNo">183</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.183"></a>
-<span class="sourceLineNo">184</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.184"></a>
-<span class="sourceLineNo">185</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.185"></a>
-<span class="sourceLineNo">186</span>import org.apache.hbase.thirdparty.com.google.common.collect.Maps;<a name="line.186"></a>
-<span class="sourceLineNo">187</span>import org.apache.hbase.thirdparty.com.google.common.io.Closeables;<a name="line.187"></a>
-<span class="sourceLineNo">188</span>import org.apache.hbase.thirdparty.com.google.protobuf.Service;<a name="line.188"></a>
-<span class="sourceLineNo">189</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.189"></a>
-<span class="sourceLineNo">190</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.190"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.conf.Configuration;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.fs.FileStatus;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.fs.FileSystem;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.fs.LocatedFileStatus;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.fs.Path;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.Cell;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.CellBuilderType;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.CellComparatorImpl;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.CompoundConfiguration;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.HConstants;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.HConstants.OperationStatusCode;<a name="line.90"></a>
+<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.NamespaceDescriptor;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.97"></a>
+<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.TableName;<a name="line.98"></a>
+<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.Tag;<a name="line.99"></a>
+<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.TagUtil;<a name="line.100"></a>
+<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.101"></a>
+<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.client.Append;<a name="line.102"></a>
+<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;<a name="line.103"></a>
+<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.client.CompactionState;<a name="line.104"></a>
+<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.105"></a>
+<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.106"></a>
+<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.client.Get;<a name="line.107"></a>
+<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.108"></a>
+<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.client.IsolationLevel;<a name="line.109"></a>
+<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.110"></a>
+<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;<a name="line.111"></a>
+<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.client.Put;<a name="line.112"></a>
+<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.113"></a>
+<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.client.RegionInfoBuilder;<a name="line.114"></a>
+<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.115"></a>
+<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.client.Result;<a name="line.116"></a>
+<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.117"></a>
+<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.118"></a>
+<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.119"></a>
+<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.client.TableDescriptorBuilder;<a name="line.120"></a>
+<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.conf.ConfigurationManager;<a name="line.121"></a>
+<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;<a name="line.122"></a>
+<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;<a name="line.124"></a>
+<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.exceptions.TimeoutIOException;<a name="line.126"></a>
+<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.127"></a>
+<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.128"></a>
+<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.filter.FilterWrapper;<a name="line.129"></a>
+<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.filter.IncompatibleFilterException;<a name="line.130"></a>
+<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.131"></a>
+<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.132"></a>
+<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.133"></a>
+<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.io.hfile.HFile;<a name="line.134"></a>
+<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;<a name="line.135"></a>
+<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;<a name="line.136"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.ipc.RpcCall;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.apache.hadoop.hbase.regionserver.throttle.StoreHotnessProtector;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>import org.apache.hadoop.hbase.regionserver.wal.WALUtil;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hadoop.hbase.security.User;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.trace.TraceUtil;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.156"></a>
+<span class="sourceLineNo">157</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.157"></a>
+<span class="sourceLineNo">158</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>import org.apache.hadoop.hbase.util.CompressionTest;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>import org.apache.hadoop.hbase.util.EncryptionTest;<a name="line.160"></a>
+<span class="sourceLineNo">161</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.161"></a>
+<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.162"></a>
+<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.util.HashedBytes;<a name="line.163"></a>
+<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.util.NonceKey;<a name="line.164"></a>
+<span class="sourceLineNo">165</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.165"></a>
+<span class="sourceLineNo">166</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.166"></a>
+<span class="sourceLineNo">167</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.167"></a>
+<span class="sourceLineNo">168</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.168"></a>
+<span class="sourceLineNo">169</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.169"></a>
+<span class="sourceLineNo">170</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.170"></a>
+<span class="sourceLineNo">171</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.171"></a>
+<span class="sourceLineNo">172</span>import org.apache.hadoop.hbase.wal.WALKeyImpl;<a name="line.172"></a>
+<span class="sourceLineNo">173</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.173"></a>
+<span class="sourceLineNo">174</span>import org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;<a name="line.174"></a>
+<span class="sourceLineNo">175</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.175"></a>
+<span class="sourceLineNo">176</span>import org.apache.hadoop.util.StringUtils;<a name="line.176"></a>
+<span class="sourceLineNo">177</span>import org.apache.htrace.core.TraceScope;<a name="line.177"></a>
+<span class="sourceLineNo">178</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.178"></a>
+<span class="sourceLineNo">179</span>import org.slf4j.Logger;<a name="line.179"></a>
+<span class="sourceLineNo">180</span>import org.slf4j.LoggerFactory;<a name="line.180"></a>
+<span class="sourceLineNo">181</span><a name="line.181"></a>
+<span class="sourceLineNo">182</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.182"></a>
+<span class="sourceLineNo">183</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.183"></a>
+<span class="sourceLineNo">184</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.184"></a>
+<span class="sourceLineNo">185</span>import org.apache.hbase.thirdparty.com.google.common.collect.Maps;<a name="line.185"></a>
+<span class="sourceLineNo">186</span>import org.apache.hbase.thirdparty.com.google.common.io.Closeables;<a name="line.186"></a>
+<span class="sourceLineNo">187</span>import org.apache.hbase.thirdparty.com.google.protobuf.Service;<a name="line.187"></a>
+<span class="sourceLineNo">188</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.188"></a>
+<span class="sourceLineNo">189</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.189"></a>
+<span class="sourceLineNo">190</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.190"></a>
 <span class="sourceLineNo">191</span><a name="line.191"></a>
 <span class="sourceLineNo">192</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.192"></a>
 <span class="sourceLineNo">193</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;<a name="line.193"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResultImpl.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResultImpl.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResultImpl.html
index 7137829..4a879bb 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResultImpl.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResultImpl.html
@@ -78,124 +78,124 @@
 <span class="sourceLineNo">070</span>import java.util.concurrent.locks.ReadWriteLock;<a name="line.70"></a>
 <span class="sourceLineNo">071</span>import java.util.concurrent.locks.ReentrantReadWriteLock;<a name="line.71"></a>
 <span class="sourceLineNo">072</span>import java.util.function.Function;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.commons.collections.CollectionUtils;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.conf.Configuration;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.fs.FileStatus;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.fs.FileSystem;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.fs.LocatedFileStatus;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.fs.Path;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.Cell;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.CellBuilderType;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.CellComparatorImpl;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.CompoundConfiguration;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.HConstants;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.HConstants.OperationStatusCode;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.92"></a>
-<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.93"></a>
-<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.NamespaceDescriptor;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.97"></a>
-<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.98"></a>
-<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.TableName;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.Tag;<a name="line.100"></a>
-<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.TagUtil;<a name="line.101"></a>
-<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.102"></a>
-<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.client.Append;<a name="line.103"></a>
-<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;<a name="line.104"></a>
-<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.client.CompactionState;<a name="line.105"></a>
-<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.106"></a>
-<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.107"></a>
-<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.client.Get;<a name="line.108"></a>
-<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.109"></a>
-<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.client.IsolationLevel;<a name="line.110"></a>
-<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.111"></a>
-<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;<a name="line.112"></a>
-<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.client.Put;<a name="line.113"></a>
-<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.client.RegionInfoBuilder;<a name="line.115"></a>
-<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.client.Result;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.118"></a>
-<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.119"></a>
-<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.120"></a>
-<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.client.TableDescriptorBuilder;<a name="line.121"></a>
-<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.conf.ConfigurationManager;<a name="line.122"></a>
-<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;<a name="line.123"></a>
-<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;<a name="line.124"></a>
-<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;<a name="line.125"></a>
-<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.126"></a>
-<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.exceptions.TimeoutIOException;<a name="line.127"></a>
-<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.128"></a>
-<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.129"></a>
-<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.filter.FilterWrapper;<a name="line.130"></a>
-<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.filter.IncompatibleFilterException;<a name="line.131"></a>
-<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.132"></a>
-<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.133"></a>
-<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.io.hfile.HFile;<a name="line.135"></a>
-<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.ipc.RpcCall;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.hadoop.hbase.regionserver.throttle.StoreHotnessProtector;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.apache.hadoop.hbase.regionserver.wal.WALUtil;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>import org.apache.hadoop.hbase.security.User;<a name="line.153"></a>
-<span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.154"></a>
-<span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>import org.apache.hadoop.hbase.trace.TraceUtil;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.158"></a>
-<span class="sourceLineNo">159</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>import org.apache.hadoop.hbase.util.CompressionTest;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>import org.apache.hadoop.hbase.util.EncryptionTest;<a name="line.161"></a>
-<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.162"></a>
-<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.util.HashedBytes;<a name="line.164"></a>
-<span class="sourceLineNo">165</span>import org.apache.hadoop.hbase.util.NonceKey;<a name="line.165"></a>
-<span class="sourceLineNo">166</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.166"></a>
-<span class="sourceLineNo">167</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.167"></a>
-<span class="sourceLineNo">168</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.168"></a>
-<span class="sourceLineNo">169</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.169"></a>
-<span class="sourceLineNo">170</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.170"></a>
-<span class="sourceLineNo">171</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.171"></a>
-<span class="sourceLineNo">172</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.172"></a>
-<span class="sourceLineNo">173</span>import org.apache.hadoop.hbase.wal.WALKeyImpl;<a name="line.173"></a>
-<span class="sourceLineNo">174</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.174"></a>
-<span class="sourceLineNo">175</span>import org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;<a name="line.175"></a>
-<span class="sourceLineNo">176</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.176"></a>
-<span class="sourceLineNo">177</span>import org.apache.hadoop.util.StringUtils;<a name="line.177"></a>
-<span class="sourceLineNo">178</span>import org.apache.htrace.core.TraceScope;<a name="line.178"></a>
-<span class="sourceLineNo">179</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.179"></a>
-<span class="sourceLineNo">180</span>import org.slf4j.Logger;<a name="line.180"></a>
-<span class="sourceLineNo">181</span>import org.slf4j.LoggerFactory;<a name="line.181"></a>
-<span class="sourceLineNo">182</span><a name="line.182"></a>
-<span class="sourceLineNo">183</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.183"></a>
-<span class="sourceLineNo">184</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.184"></a>
-<span class="sourceLineNo">185</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.185"></a>
-<span class="sourceLineNo">186</span>import org.apache.hbase.thirdparty.com.google.common.collect.Maps;<a name="line.186"></a>
-<span class="sourceLineNo">187</span>import org.apache.hbase.thirdparty.com.google.common.io.Closeables;<a name="line.187"></a>
-<span class="sourceLineNo">188</span>import org.apache.hbase.thirdparty.com.google.protobuf.Service;<a name="line.188"></a>
-<span class="sourceLineNo">189</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.189"></a>
-<span class="sourceLineNo">190</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.190"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.conf.Configuration;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.fs.FileStatus;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.fs.FileSystem;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.fs.LocatedFileStatus;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.fs.Path;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.Cell;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.CellBuilderType;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.CellComparatorImpl;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.CompoundConfiguration;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.HConstants;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.HConstants.OperationStatusCode;<a name="line.90"></a>
+<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.NamespaceDescriptor;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.97"></a>
+<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.TableName;<a name="line.98"></a>
+<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.Tag;<a name="line.99"></a>
+<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.TagUtil;<a name="line.100"></a>
+<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.101"></a>
+<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.client.Append;<a name="line.102"></a>
+<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;<a name="line.103"></a>
+<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.client.CompactionState;<a name="line.104"></a>
+<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.105"></a>
+<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.106"></a>
+<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.client.Get;<a name="line.107"></a>
+<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.108"></a>
+<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.client.IsolationLevel;<a name="line.109"></a>
+<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.110"></a>
+<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;<a name="line.111"></a>
+<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.client.Put;<a name="line.112"></a>
+<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.113"></a>
+<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.client.RegionInfoBuilder;<a name="line.114"></a>
+<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.115"></a>
+<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.client.Result;<a name="line.116"></a>
+<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.117"></a>
+<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.118"></a>
+<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.119"></a>
+<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.client.TableDescriptorBuilder;<a name="line.120"></a>
+<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.conf.ConfigurationManager;<a name="line.121"></a>
+<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;<a name="line.122"></a>
+<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;<a name="line.124"></a>
+<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.exceptions.TimeoutIOException;<a name="line.126"></a>
+<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.127"></a>
+<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.128"></a>
+<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.filter.FilterWrapper;<a name="line.129"></a>
+<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.filter.IncompatibleFilterException;<a name="line.130"></a>
+<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.131"></a>
+<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.132"></a>
+<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.133"></a>
+<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.io.hfile.HFile;<a name="line.134"></a>
+<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;<a name="line.135"></a>
+<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;<a name="line.136"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.ipc.RpcCall;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.apache.hadoop.hbase.regionserver.throttle.StoreHotnessProtector;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>import org.apache.hadoop.hbase.regionserver.wal.WALUtil;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hadoop.hbase.security.User;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.trace.TraceUtil;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.156"></a>
+<span class="sourceLineNo">157</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.157"></a>
+<span class="sourceLineNo">158</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>import org.apache.hadoop.hbase.util.CompressionTest;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>import org.apache.hadoop.hbase.util.EncryptionTest;<a name="line.160"></a>
+<span class="sourceLineNo">161</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.161"></a>
+<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.162"></a>
+<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.util.HashedBytes;<a name="line.163"></a>
+<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.util.NonceKey;<a name="line.164"></a>
+<span class="sourceLineNo">165</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.165"></a>
+<span class="sourceLineNo">166</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.166"></a>
+<span class="sourceLineNo">167</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.167"></a>
+<span class="sourceLineNo">168</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.168"></a>
+<span class="sourceLineNo">169</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.169"></a>
+<span class="sourceLineNo">170</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.170"></a>
+<span class="sourceLineNo">171</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.171"></a>
+<span class="sourceLineNo">172</span>import org.apache.hadoop.hbase.wal.WALKeyImpl;<a name="line.172"></a>
+<span class="sourceLineNo">173</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.173"></a>
+<span class="sourceLineNo">174</span>import org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;<a name="line.174"></a>
+<span class="sourceLineNo">175</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.175"></a>
+<span class="sourceLineNo">176</span>import org.apache.hadoop.util.StringUtils;<a name="line.176"></a>
+<span class="sourceLineNo">177</span>import org.apache.htrace.core.TraceScope;<a name="line.177"></a>
+<span class="sourceLineNo">178</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.178"></a>
+<span class="sourceLineNo">179</span>import org.slf4j.Logger;<a name="line.179"></a>
+<span class="sourceLineNo">180</span>import org.slf4j.LoggerFactory;<a name="line.180"></a>
+<span class="sourceLineNo">181</span><a name="line.181"></a>
+<span class="sourceLineNo">182</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.182"></a>
+<span class="sourceLineNo">183</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.183"></a>
+<span class="sourceLineNo">184</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.184"></a>
+<span class="sourceLineNo">185</span>import org.apache.hbase.thirdparty.com.google.common.collect.Maps;<a name="line.185"></a>
+<span class="sourceLineNo">186</span>import org.apache.hbase.thirdparty.com.google.common.io.Closeables;<a name="line.186"></a>
+<span class="sourceLineNo">187</span>import org.apache.hbase.thirdparty.com.google.protobuf.Service;<a name="line.187"></a>
+<span class="sourceLineNo">188</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.188"></a>
+<span class="sourceLineNo">189</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.189"></a>
+<span class="sourceLineNo">190</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.190"></a>
 <span class="sourceLineNo">191</span><a name="line.191"></a>
 <span class="sourceLineNo">192</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.192"></a>
 <span class="sourceLineNo">193</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;<a name="line.193"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.MutationBatchOperation.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.MutationBatchOperation.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.MutationBatchOperation.html
index 7137829..4a879bb 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.MutationBatchOperation.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.MutationBatchOperation.html
@@ -78,124 +78,124 @@
 <span class="sourceLineNo">070</span>import java.util.concurrent.locks.ReadWriteLock;<a name="line.70"></a>
 <span class="sourceLineNo">071</span>import java.util.concurrent.locks.ReentrantReadWriteLock;<a name="line.71"></a>
 <span class="sourceLineNo">072</span>import java.util.function.Function;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.commons.collections.CollectionUtils;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.conf.Configuration;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.fs.FileStatus;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.fs.FileSystem;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.fs.LocatedFileStatus;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.fs.Path;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.Cell;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.CellBuilderType;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.CellComparatorImpl;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.CompoundConfiguration;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.HConstants;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.HConstants.OperationStatusCode;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.92"></a>
-<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.93"></a>
-<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.NamespaceDescriptor;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.97"></a>
-<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.98"></a>
-<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.TableName;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.Tag;<a name="line.100"></a>
-<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.TagUtil;<a name="line.101"></a>
-<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.102"></a>
-<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.client.Append;<a name="line.103"></a>
-<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;<a name="line.104"></a>
-<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.client.CompactionState;<a name="line.105"></a>
-<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.106"></a>
-<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.107"></a>
-<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.client.Get;<a name="line.108"></a>
-<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.109"></a>
-<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.client.IsolationLevel;<a name="line.110"></a>
-<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.111"></a>
-<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;<a name="line.112"></a>
-<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.client.Put;<a name="line.113"></a>
-<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.client.RegionInfoBuilder;<a name="line.115"></a>
-<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.client.Result;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.118"></a>
-<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.119"></a>
-<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.120"></a>
-<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.client.TableDescriptorBuilder;<a name="line.121"></a>
-<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.conf.ConfigurationManager;<a name="line.122"></a>
-<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;<a name="line.123"></a>
-<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;<a name="line.124"></a>
-<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;<a name="line.125"></a>
-<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.126"></a>
-<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.exceptions.TimeoutIOException;<a name="line.127"></a>
-<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.128"></a>
-<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.129"></a>
-<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.filter.FilterWrapper;<a name="line.130"></a>
-<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.filter.IncompatibleFilterException;<a name="line.131"></a>
-<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.132"></a>
-<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.133"></a>
-<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.io.hfile.HFile;<a name="line.135"></a>
-<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.ipc.RpcCall;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.hadoop.hbase.regionserver.throttle.StoreHotnessProtector;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.apache.hadoop.hbase.regionserver.wal.WALUtil;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>import org.apache.hadoop.hbase.security.User;<a name="line.153"></a>
-<span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.154"></a>
-<span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>import org.apache.hadoop.hbase.trace.TraceUtil;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.158"></a>
-<span class="sourceLineNo">159</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>import org.apache.hadoop.hbase.util.CompressionTest;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>import org.apache.hadoop.hbase.util.EncryptionTest;<a name="line.161"></a>
-<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.162"></a>
-<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.util.HashedBytes;<a name="line.164"></a>
-<span class="sourceLineNo">165</span>import org.apache.hadoop.hbase.util.NonceKey;<a name="line.165"></a>
-<span class="sourceLineNo">166</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.166"></a>
-<span class="sourceLineNo">167</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.167"></a>
-<span class="sourceLineNo">168</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.168"></a>
-<span class="sourceLineNo">169</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.169"></a>
-<span class="sourceLineNo">170</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.170"></a>
-<span class="sourceLineNo">171</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.171"></a>
-<span class="sourceLineNo">172</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.172"></a>
-<span class="sourceLineNo">173</span>import org.apache.hadoop.hbase.wal.WALKeyImpl;<a name="line.173"></a>
-<span class="sourceLineNo">174</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.174"></a>
-<span class="sourceLineNo">175</span>import org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;<a name="line.175"></a>
-<span class="sourceLineNo">176</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.176"></a>
-<span class="sourceLineNo">177</span>import org.apache.hadoop.util.StringUtils;<a name="line.177"></a>
-<span class="sourceLineNo">178</span>import org.apache.htrace.core.TraceScope;<a name="line.178"></a>
-<span class="sourceLineNo">179</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.179"></a>
-<span class="sourceLineNo">180</span>import org.slf4j.Logger;<a name="line.180"></a>
-<span class="sourceLineNo">181</span>import org.slf4j.LoggerFactory;<a name="line.181"></a>
-<span class="sourceLineNo">182</span><a name="line.182"></a>
-<span class="sourceLineNo">183</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.183"></a>
-<span class="sourceLineNo">184</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.184"></a>
-<span class="sourceLineNo">185</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.185"></a>
-<span class="sourceLineNo">186</span>import org.apache.hbase.thirdparty.com.google.common.collect.Maps;<a name="line.186"></a>
-<span class="sourceLineNo">187</span>import org.apache.hbase.thirdparty.com.google.common.io.Closeables;<a name="line.187"></a>
-<span class="sourceLineNo">188</span>import org.apache.hbase.thirdparty.com.google.protobuf.Service;<a name="line.188"></a>
-<span class="sourceLineNo">189</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.189"></a>
-<span class="sourceLineNo">190</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.190"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.conf.Configuration;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.fs.FileStatus;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.fs.FileSystem;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.fs.LocatedFileStatus;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.fs.Path;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.Cell;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.CellBuilderType;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.CellComparatorImpl;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.CompoundConfiguration;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.HConstants;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.HConstants.OperationStatusCode;<a name="line.90"></a>
+<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.NamespaceDescriptor;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.97"></a>
+<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.TableName;<a name="line.98"></a>
+<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.Tag;<a name="line.99"></a>
+<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.TagUtil;<a name="line.100"></a>
+<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.101"></a>
+<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.client.Append;<a name="line.102"></a>
+<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;<a name="line.103"></a>
+<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.client.CompactionState;<a name="line.104"></a>
+<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.105"></a>
+<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.106"></a>
+<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.client.Get;<a name="line.107"></a>
+<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.108"></a>
+<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.client.IsolationLevel;<a name="line.109"></a>
+<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.110"></a>
+<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;<a name="line.111"></a>
+<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.client.Put;<a name="line.112"></a>
+<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.113"></a>
+<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.client.RegionInfoBuilder;<a name="line.114"></a>
+<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.115"></a>
+<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.client.Result;<a name="line.116"></a>
+<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.117"></a>
+<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.118"></a>
+<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.119"></a>
+<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.client.TableDescriptorBuilder;<a name="line.120"></a>
+<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.conf.ConfigurationManager;<a name="line.121"></a>
+<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;<a name="line.122"></a>
+<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;<a name="line.124"></a>
+<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.exceptions.TimeoutIOException;<a name="line.126"></a>
+<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.127"></a>
+<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.128"></a>
+<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.filter.FilterWrapper;<a name="line.129"></a>
+<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.filter.IncompatibleFilterException;<a name="line.130"></a>
+<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.131"></a>
+<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.132"></a>
+<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.133"></a>
+<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.io.hfile.HFile;<a name="line.134"></a>
+<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;<a name="line.135"></a>
+<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;<a name="line.136"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.ipc.RpcCall;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.apache.hadoop.hbase.regionserver.throttle.StoreHotnessProtector;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>import org.apache.hadoop.hbase.regionserver.wal.WALUtil;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hadoop.hbase.security.User;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.trace.TraceUtil;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.156"></a>
+<span class="sourceLineNo">157</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.157"></a>
+<span class="sourceLineNo">158</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>import org.apache.hadoop.hbase.util.CompressionTest;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>import org.apache.hadoop.hbase.util.EncryptionTest;<a name="line.160"></a>
+<span class="sourceLineNo">161</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.161"></a>
+<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.162"></a>
+<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.util.HashedBytes;<a name="line.163"></a>
+<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.util.NonceKey;<a name="line.164"></a>
+<span class="sourceLineNo">165</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.165"></a>
+<span class="sourceLineNo">166</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.166"></a>
+<span class="sourceLineNo">167</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.167"></a>
+<span class="sourceLineNo">168</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.168"></a>
+<span class="sourceLineNo">169</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.169"></a>
+<span class="sourceLineNo">170</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.170"></a>
+<span class="sourceLineNo">171</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.171"></a>
+<span class="sourceLineNo">172</span>import org.apache.hadoop.hbase.wal.WALKeyImpl;<a name="line.172"></a>
+<span class="sourceLineNo">173</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.173"></a>
+<span class="sourceLineNo">174</span>import org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;<a name="line.174"></a>
+<span class="sourceLineNo">175</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.175"></a>
+<span class="sourceLineNo">176</span>import org.apache.hadoop.util.StringUtils;<a name="line.176"></a>
+<span class="sourceLineNo">177</span>import org.apache.htrace.core.TraceScope;<a name="line.177"></a>
+<span class="sourceLineNo">178</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.178"></a>
+<span class="sourceLineNo">179</span>import org.slf4j.Logger;<a name="line.179"></a>
+<span class="sourceLineNo">180</span>import org.slf4j.LoggerFactory;<a name="line.180"></a>
+<span class="sourceLineNo">181</span><a name="line.181"></a>
+<span class="sourceLineNo">182</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.182"></a>
+<span class="sourceLineNo">183</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.183"></a>
+<span class="sourceLineNo">184</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.184"></a>
+<span class="sourceLineNo">185</span>import org.apache.hbase.thirdparty.com.google.common.collect.Maps;<a name="line.185"></a>
+<span class="sourceLineNo">186</span>import org.apache.hbase.thirdparty.com.google.common.io.Closeables;<a name="line.186"></a>
+<span class="sourceLineNo">187</span>import org.apache.hbase.thirdparty.com.google.protobuf.Service;<a name="line.187"></a>
+<span class="sourceLineNo">188</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.188"></a>
+<span class="sourceLineNo">189</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.189"></a>
+<span class="sourceLineNo">190</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.190"></a>
 <span class="sourceLineNo">191</span><a name="line.191"></a>
 <span class="sourceLineNo">192</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.192"></a>
 <span class="sourceLineNo">193</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;<a name="line.193"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/testdevapidocs/org/apache/hadoop/hbase/client/TestFlushFromClient.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/client/TestFlushFromClient.html b/testdevapidocs/org/apache/hadoop/hbase/client/TestFlushFromClient.html
index 4228156..1fe64a5 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/client/TestFlushFromClient.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/client/TestFlushFromClient.html
@@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";
 <li class="blockList">
 <hr>
 <br>
-<pre>public class <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.51">TestFlushFromClient</a>
+<pre>public class <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.52">TestFlushFromClient</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 </li>
 </ul>
@@ -142,7 +142,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/client/TestFlushFromClient.html#FAMILY">FAMILY</a></span></code>&nbsp;</td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code>private static org.apache.commons.logging.Log</code></td>
+<td class="colFirst"><code>private static org.slf4j.Logger</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/client/TestFlushFromClient.html#LOG">LOG</a></span></code>&nbsp;</td>
 </tr>
 <tr class="altColor">
@@ -273,7 +273,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>CLASS_RULE</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/HBaseClassTestRule.html" title="class in org.apache.hadoop.hbase">HBaseClassTestRule</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.54">CLASS_RULE</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/HBaseClassTestRule.html" title="class in org.apache.hadoop.hbase">HBaseClassTestRule</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.55">CLASS_RULE</a></pre>
 </li>
 </ul>
 <a name="LOG">
@@ -282,7 +282,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>LOG</h4>
-<pre>private static final&nbsp;org.apache.commons.logging.Log <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.57">LOG</a></pre>
+<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.58">LOG</a></pre>
 </li>
 </ul>
 <a name="TEST_UTIL">
@@ -291,7 +291,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>TEST_UTIL</h4>
-<pre>private static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/HBaseTestingUtility.html" title="class in org.apache.hadoop.hbase">HBaseTestingUtility</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.58">TEST_UTIL</a></pre>
+<pre>private static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/HBaseTestingUtility.html" title="class in org.apache.hadoop.hbase">HBaseTestingUtility</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.59">TEST_UTIL</a></pre>
 </li>
 </ul>
 <a name="asyncConn">
@@ -300,7 +300,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>asyncConn</h4>
-<pre>private static&nbsp;org.apache.hadoop.hbase.client.AsyncConnection <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.59">asyncConn</a></pre>
+<pre>private static&nbsp;org.apache.hadoop.hbase.client.AsyncConnection <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.60">asyncConn</a></pre>
 </li>
 </ul>
 <a name="SPLITS">
@@ -309,7 +309,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>SPLITS</h4>
-<pre>private static final&nbsp;byte[][] <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.60">SPLITS</a></pre>
+<pre>private static final&nbsp;byte[][] <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.61">SPLITS</a></pre>
 </li>
 </ul>
 <a name="ROWS">
@@ -318,7 +318,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>ROWS</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;byte[]&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.61">ROWS</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;byte[]&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.62">ROWS</a></pre>
 </li>
 </ul>
 <a name="FAMILY">
@@ -327,7 +327,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>FAMILY</h4>
-<pre>private static final&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.65">FAMILY</a></pre>
+<pre>private static final&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.66">FAMILY</a></pre>
 </li>
 </ul>
 <a name="name">
@@ -336,7 +336,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>name</h4>
-<pre>public&nbsp;org.junit.rules.TestName <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.68">name</a></pre>
+<pre>public&nbsp;org.junit.rules.TestName <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.69">name</a></pre>
 </li>
 </ul>
 <a name="tableName">
@@ -345,7 +345,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>tableName</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.TableName <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.70">tableName</a></pre>
+<pre>public&nbsp;org.apache.hadoop.hbase.TableName <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.71">tableName</a></pre>
 </li>
 </ul>
 </li>
@@ -362,7 +362,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>TestFlushFromClient</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.51">TestFlushFromClient</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.52">TestFlushFromClient</a>()</pre>
 </li>
 </ul>
 </li>
@@ -379,7 +379,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>setUpBeforeClass</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.73">setUpBeforeClass</a>()
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.74">setUpBeforeClass</a>()
                              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -393,7 +393,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>tearDownAfterClass</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.79">tearDownAfterClass</a>()
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.80">tearDownAfterClass</a>()
                                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -407,7 +407,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>setUp</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.85">setUp</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.86">setUp</a>()
            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -421,7 +421,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>tearDown</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.100">tearDown</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.101">tearDown</a>()
               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -435,7 +435,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testFlushTable</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.108">testFlushTable</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.109">testFlushTable</a>()
                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -449,7 +449,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testAsyncFlushTable</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.116">testAsyncFlushTable</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.117">testAsyncFlushTable</a>()
                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -463,7 +463,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testFlushRegion</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.123">testFlushRegion</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.124">testFlushRegion</a>()
                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -477,7 +477,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testAsyncFlushRegion</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.134">testAsyncFlushRegion</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.135">testAsyncFlushRegion</a>()
                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -491,7 +491,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testFlushRegionServer</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.144">testFlushRegionServer</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.145">testFlushRegionServer</a>()
                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -505,7 +505,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testAsyncFlushRegionServer</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.157">testAsyncFlushRegionServer</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.158">testAsyncFlushRegionServer</a>()
                                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -519,7 +519,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getRegionInfo</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.hbase.regionserver.HRegion&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.168">getRegionInfo</a>()</pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.hbase.regionserver.HRegion&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.169">getRegionInfo</a>()</pre>
 </li>
 </ul>
 <a name="getRegionInfo-org.apache.hadoop.hbase.regionserver.HRegionServer-">
@@ -528,7 +528,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getRegionInfo</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.hbase.regionserver.HRegion&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.176">getRegionInfo</a>(org.apache.hadoop.hbase.regionserver.HRegionServer&nbsp;rs)</pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.hbase.regionserver.HRegion&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html#line.177">getRegionInfo</a>(org.apache.hadoop.hbase.regionserver.HRegionServer&nbsp;rs)</pre>
 </li>
 </ul>
 </li>

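Every hunk in the class page above makes the same member-type change: the commons-logging Log field becomes an slf4j Logger (with LogFactory.getLog swapped for LoggerFactory.getLogger in the sources). A minimal Java sketch of that migration, using an illustrative class name and log message that are not taken from this commit:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class LoggingMigrationSketch {
      // Before: private static final org.apache.commons.logging.Log LOG =
      //             LogFactory.getLog(TestFlushFromClient.class);
      // After, matching the "private static org.slf4j.Logger LOG" rows in the diff:
      private static final Logger LOG = LoggerFactory.getLogger(LoggingMigrationSketch.class);

      public static void main(String[] args) {
        // slf4j takes {} placeholders, so no string concatenation in log calls.
        LOG.info("flushed {} regions", 3);
      }
    }

The {} parameterization is the usual motivation for the swap: arguments are only formatted when the log level is actually enabled.
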
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/testdevapidocs/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html b/testdevapidocs/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html
index ec327b2..e77daad 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html
@@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";
 <li class="blockList">
 <hr>
 <br>
-<pre>public class <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.48">TestSeparateClientZKCluster</a>
+<pre>public class <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.49">TestSeparateClientZKCluster</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 </li>
 </ul>
@@ -146,7 +146,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#family">family</a></span></code>&nbsp;</td>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code>private static org.apache.commons.logging.Log</code></td>
+<td class="colFirst"><code>private static org.slf4j.Logger</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#LOG">LOG</a></span></code>&nbsp;</td>
 </tr>
 <tr class="rowColor">
@@ -265,7 +265,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>LOG</h4>
-<pre>private static final&nbsp;org.apache.commons.logging.Log <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.49">LOG</a></pre>
+<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.50">LOG</a></pre>
 </li>
 </ul>
 <a name="TEST_UTIL">
@@ -274,7 +274,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>TEST_UTIL</h4>
-<pre>private static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/HBaseTestingUtility.html" title="class in org.apache.hadoop.hbase">HBaseTestingUtility</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.50">TEST_UTIL</a></pre>
+<pre>private static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/HBaseTestingUtility.html" title="class in org.apache.hadoop.hbase">HBaseTestingUtility</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.51">TEST_UTIL</a></pre>
 </li>
 </ul>
 <a name="clientZkDir">
@@ -283,7 +283,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>clientZkDir</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/io/File.html?is-external=true" title="class or interface in java.io">File</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.51">clientZkDir</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/io/File.html?is-external=true" title="class or interface in java.io">File</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.52">clientZkDir</a></pre>
 </li>
 </ul>
 <a name="ZK_SESSION_TIMEOUT">
@@ -292,7 +292,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>ZK_SESSION_TIMEOUT</h4>
-<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.52">ZK_SESSION_TIMEOUT</a></pre>
+<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.53">ZK_SESSION_TIMEOUT</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.client.TestSeparateClientZKCluster.ZK_SESSION_TIMEOUT">Constant Field Values</a></dd>
@@ -305,7 +305,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>clientZkCluster</h4>
-<pre>private static&nbsp;org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.53">clientZkCluster</a></pre>
+<pre>private static&nbsp;org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.54">clientZkCluster</a></pre>
 </li>
 </ul>
 <a name="family">
@@ -314,7 +314,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>family</h4>
-<pre>private final&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.55">family</a></pre>
+<pre>private final&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.56">family</a></pre>
 </li>
 </ul>
 <a name="qualifier">
@@ -323,7 +323,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>qualifier</h4>
-<pre>private final&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.56">qualifier</a></pre>
+<pre>private final&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.57">qualifier</a></pre>
 </li>
 </ul>
 <a name="row">
@@ -332,7 +332,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>row</h4>
-<pre>private final&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.57">row</a></pre>
+<pre>private final&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.58">row</a></pre>
 </li>
 </ul>
 <a name="value">
@@ -341,7 +341,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>value</h4>
-<pre>private final&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.58">value</a></pre>
+<pre>private final&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.59">value</a></pre>
 </li>
 </ul>
 <a name="newVal">
@@ -350,7 +350,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>newVal</h4>
-<pre>private final&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.59">newVal</a></pre>
+<pre>private final&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.60">newVal</a></pre>
 </li>
 </ul>
 <a name="name">
@@ -359,7 +359,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>name</h4>
-<pre>public&nbsp;org.junit.rules.TestName <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.62">name</a></pre>
+<pre>public&nbsp;org.junit.rules.TestName <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.63">name</a></pre>
 </li>
 </ul>
 <a name="CLASS_RULE">
@@ -368,7 +368,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>CLASS_RULE</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/HBaseClassTestRule.html" title="class in org.apache.hadoop.hbase">HBaseClassTestRule</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.65">CLASS_RULE</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/HBaseClassTestRule.html" title="class in org.apache.hadoop.hbase">HBaseClassTestRule</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.66">CLASS_RULE</a></pre>
 </li>
 </ul>
 </li>
@@ -385,7 +385,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>TestSeparateClientZKCluster</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.48">TestSeparateClientZKCluster</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.49">TestSeparateClientZKCluster</a>()</pre>
 </li>
 </ul>
 </li>
@@ -402,7 +402,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>beforeAllTests</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.69">beforeAllTests</a>()
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.70">beforeAllTests</a>()
                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -416,7 +416,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>afterAllTests</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.88">afterAllTests</a>()
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.89">afterAllTests</a>()
                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -430,7 +430,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testBasicOperation</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.95">testBasicOperation</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.96">testBasicOperation</a>()
                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -444,7 +444,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testMasterSwitch</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.122">testMasterSwitch</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.123">testMasterSwitch</a>()
                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -458,7 +458,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testMetaRegionMove</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.146">testMetaRegionMove</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.147">testMetaRegionMove</a>()
                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -472,7 +472,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testMetaMoveDuringClientZkClusterRestart</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.202">testMetaMoveDuringClientZkClusterRestart</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.203">testMetaMoveDuringClientZkClusterRestart</a>()
                                               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -486,7 +486,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>testAsyncTable</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.253">testAsyncTable</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html#line.254">testAsyncTable</a>()
                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/testdevapidocs/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.CreateFailObserver.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.CreateFailObserver.html b/testdevapidocs/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.CreateFailObserver.html
index b760a13..0808e27 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.CreateFailObserver.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.CreateFailObserver.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html#line.121">TestFailedProcCleanup.CreateFailObserver</a>
+<pre>public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html#line.122">TestFailedProcCleanup.CreateFailObserver</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements org.apache.hadoop.hbase.coprocessor.MasterCoprocessor, org.apache.hadoop.hbase.coprocessor.MasterObserver</pre>
 </li>
@@ -237,7 +237,7 @@ implements org.apache.hadoop.hbase.coprocessor.MasterCoprocessor, org.apache.had
 <ul class="blockListLast">
 <li class="blockList">
 <h4>CreateFailObserver</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.CreateFailObserver.html#line.121">CreateFailObserver</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.CreateFailObserver.html#line.122">CreateFailObserver</a>()</pre>
 </li>
 </ul>
 </li>
@@ -254,7 +254,7 @@ implements org.apache.hadoop.hbase.coprocessor.MasterCoprocessor, org.apache.had
 <ul class="blockList">
 <li class="blockList">
 <h4>preCreateTable</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.CreateFailObserver.html#line.124">preCreateTable</a>(org.apache.hadoop.hbase.coprocessor.ObserverContext&lt;org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment&gt;&nbsp;env,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.CreateFailObserver.html#line.125">preCreateTable</a>(org.apache.hadoop.hbase.coprocessor.ObserverContext&lt;org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment&gt;&nbsp;env,
                            org.apache.hadoop.hbase.client.TableDescriptor&nbsp;desc,
                            org.apache.hadoop.hbase.client.RegionInfo[]&nbsp;regions)
                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -272,7 +272,7 @@ implements org.apache.hadoop.hbase.coprocessor.MasterCoprocessor, org.apache.had
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getMasterObserver</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true" title="class or interface in java.util">Optional</a>&lt;org.apache.hadoop.hbase.coprocessor.MasterObserver&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.CreateFailObserver.html#line.133">getMasterObserver</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true" title="class or interface in java.util">Optional</a>&lt;org.apache.hadoop.hbase.coprocessor.MasterObserver&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.CreateFailObserver.html#line.134">getMasterObserver</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code>getMasterObserver</code>&nbsp;in interface&nbsp;<code>org.apache.hadoop.hbase.coprocessor.MasterCoprocessor</code></dd>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/testdevapidocs/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.CreateFailObserverHandler.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.CreateFailObserverHandler.html b/testdevapidocs/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.CreateFailObserverHandler.html
index 0bf588b..de4cc1d 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.CreateFailObserverHandler.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.CreateFailObserverHandler.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html#line.138">TestFailedProcCleanup.CreateFailObserverHandler</a>
+<pre>public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html#line.139">TestFailedProcCleanup.CreateFailObserverHandler</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements org.apache.hadoop.hbase.coprocessor.MasterCoprocessor, org.apache.hadoop.hbase.coprocessor.MasterObserver</pre>
 </li>
@@ -237,7 +237,7 @@ implements org.apache.hadoop.hbase.coprocessor.MasterCoprocessor, org.apache.had
 <ul class="blockListLast">
 <li class="blockList">
 <h4>CreateFailObserverHandler</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.CreateFailObserverHandler.html#line.138">CreateFailObserverHandler</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.CreateFailObserverHandler.html#line.139">CreateFailObserverHandler</a>()</pre>
 </li>
 </ul>
 </li>
@@ -254,7 +254,7 @@ implements org.apache.hadoop.hbase.coprocessor.MasterCoprocessor, org.apache.had
 <ul class="blockList">
 <li class="blockList">
 <h4>preCreateTableAction</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.CreateFailObserverHandler.html#line.141">preCreateTableAction</a>(org.apache.hadoop.hbase.coprocessor.ObserverContext&lt;org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment&gt;&nbsp;ctx,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.CreateFailObserverHandler.html#line.142">preCreateTableAction</a>(org.apache.hadoop.hbase.coprocessor.ObserverContext&lt;org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment&gt;&nbsp;ctx,
                                  org.apache.hadoop.hbase.client.TableDescriptor&nbsp;desc,
                                  org.apache.hadoop.hbase.client.RegionInfo[]&nbsp;regions)
                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -272,7 +272,7 @@ implements org.apache.hadoop.hbase.coprocessor.MasterCoprocessor, org.apache.had
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getMasterObserver</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true" title="class or interface in java.util">Optional</a>&lt;org.apache.hadoop.hbase.coprocessor.MasterObserver&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.CreateFailObserverHandler.html#line.151">getMasterObserver</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true" title="class or interface in java.util">Optional</a>&lt;org.apache.hadoop.hbase.coprocessor.MasterObserver&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.CreateFailObserverHandler.html#line.152">getMasterObserver</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code>getMasterObserver</code>&nbsp;in interface&nbsp;<code>org.apache.hadoop.hbase.coprocessor.MasterCoprocessor</code></dd>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/testdevapidocs/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html b/testdevapidocs/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html
index 45b3ee4..ad3de12 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html
@@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";
 <li class="blockList">
 <hr>
 <br>
-<pre>public class <a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html#line.54">TestFailedProcCleanup</a>
+<pre>public class <a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html#line.55">TestFailedProcCleanup</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 <div class="block">Check if CompletedProcedureCleaner cleans up failed nonce procedures.</div>
 </li>
@@ -170,7 +170,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html#FAMILY">FAMILY</a></span></code>&nbsp;</td>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code>private static org.apache.commons.logging.Log</code></td>
+<td class="colFirst"><code>private static org.slf4j.Logger</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html#LOG">LOG</a></span></code>&nbsp;</td>
 </tr>
 <tr class="rowColor">
@@ -257,7 +257,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>CLASS_RULE</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/HBaseClassTestRule.html" title="class in org.apache.hadoop.hbase">HBaseClassTestRule</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html#line.57">CLASS_RULE</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/HBaseClassTestRule.html" title="class in org.apache.hadoop.hbase">HBaseClassTestRule</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html#line.58">CLASS_RULE</a></pre>
 </li>
 </ul>
 <a name="LOG">
@@ -266,7 +266,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>LOG</h4>
-<pre>private static final&nbsp;org.apache.commons.logging.Log <a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html#line.60">LOG</a></pre>
+<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html#line.61">LOG</a></pre>
 </li>
 </ul>
 <a name="TEST_UTIL">
@@ -275,7 +275,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>TEST_UTIL</h4>
-<pre>protected static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/HBaseTestingUtility.html" title="class in org.apache.hadoop.hbase">HBaseTestingUtility</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html#line.62">TEST_UTIL</a></pre>
+<pre>protected static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/HBaseTestingUtility.html" title="class in org.apache.hadoop.hbase">HBaseTestingUtility</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html#line.63">TEST_UTIL</a></pre>
 </li>
 </ul>
 <a name="conf">
@@ -284,7 +284,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>conf</h4>
-<pre>private static&nbsp;org.apache.hadoop.conf.Configuration <a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html#line.63">conf</a></pre>
+<pre>private static&nbsp;org.apache.hadoop.conf.Configuration <a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html#line.64">conf</a></pre>
 </li>
 </ul>
 <a name="TABLE">
@@ -293,7 +293,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>TABLE</h4>
-<pre>private static final&nbsp;org.apache.hadoop.hbase.TableName <a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html#line.64">TABLE</a></pre>
+<pre>private static final&nbsp;org.apache.hadoop.hbase.TableName <a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html#line.65">TABLE</a></pre>
 </li>
 </ul>
 <a name="FAMILY">
@@ -302,7 +302,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>FAMILY</h4>
-<pre>private static final&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html#line.65">FAMILY</a></pre>
+<pre>private static final&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html#line.66">FAMILY</a></pre>
 </li>
 </ul>
 <a name="evictionDelay">
@@ -311,7 +311,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>evictionDelay</h4>
-<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html#line.66">evictionDelay</a></pre>
+<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html#line.67">evictionDelay</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.procedure.TestFailedProcCleanup.evictionDelay">Constant Field Values</a></dd>
@@ -332,7 +332,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>TestFailedProcCleanup</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html#line.54">TestFailedProcCleanup</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html#line.55">TestFailedProcCleanup</a>()</pre>
 </li>
 </ul>
 </li>
@@ -349,7 +349,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>setUpBeforeClass</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html#line.69">setUpBeforeClass</a>()</pre>
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html#line.70">setUpBeforeClass</a>()</pre>
 </li>
 </ul>
 <a name="tearDown--">
@@ -358,7 +358,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>tearDown</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html#line.76">tearDown</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html#line.77">tearDown</a>()
               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -372,7 +372,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testFailCreateTable</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html#line.81">testFailCreateTable</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html#line.82">testFailCreateTable</a>()
                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -386,7 +386,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>testFailCreateTableAction</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html#line.101">testFailCreateTableAction</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html#line.102">testFailCreateTableAction</a>()
                                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>

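The field-detail hunks in each of these pages also shift the HBaseClassTestRule CLASS_RULE member down one source line; it is the standard class-level rule these HBase tests declare. A minimal sketch of that declaration, assuming the usual HBaseClassTestRule.forClass factory and a hypothetical test class name:

    import org.apache.hadoop.hbase.HBaseClassTestRule;
    import org.junit.ClassRule;

    public class ExampleHBaseTest {
      // forClass derives a class-level timeout from the test's size category,
      // so a hung test fails the class instead of stalling the whole build.
      @ClassRule
      public static final HBaseClassTestRule CLASS_RULE =
          HBaseClassTestRule.forClass(ExampleHBaseTest.class);
    }
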
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/testdevapidocs/org/apache/hadoop/hbase/wal/TestDisabledWAL.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/wal/TestDisabledWAL.html b/testdevapidocs/org/apache/hadoop/hbase/wal/TestDisabledWAL.html
index 88cfb64..88de117 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/wal/TestDisabledWAL.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/wal/TestDisabledWAL.html
@@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";
 <li class="blockList">
 <hr>
 <br>
-<pre>public class <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestDisabledWAL.html#line.49">TestDisabledWAL</a>
+<pre>public class <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestDisabledWAL.html#line.50">TestDisabledWAL</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 </li>
 </ul>
@@ -138,7 +138,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/TestDisabledWAL.html#fam">fam</a></span></code>&nbsp;</td>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code>private static org.apache.commons.logging.Log</code></td>
+<td class="colFirst"><code>private static org.slf4j.Logger</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/wal/TestDisabledWAL.html#LOG">LOG</a></span></code>&nbsp;</td>
 </tr>
 <tr class="rowColor">
@@ -237,7 +237,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>CLASS_RULE</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/HBaseClassTestRule.html" title="class in org.apache.hadoop.hbase">HBaseClassTestRule</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestDisabledWAL.html#line.52">CLASS_RULE</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/HBaseClassTestRule.html" title="class in org.apache.hadoop.hbase">HBaseClassTestRule</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestDisabledWAL.html#line.53">CLASS_RULE</a></pre>
 </li>
 </ul>
 <a name="name">
@@ -246,7 +246,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>name</h4>
-<pre>public&nbsp;org.junit.rules.TestName <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestDisabledWAL.html#line.56">name</a></pre>
+<pre>public&nbsp;org.junit.rules.TestName <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestDisabledWAL.html#line.57">name</a></pre>
 </li>
 </ul>
 <a name="LOG">
@@ -255,7 +255,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>LOG</h4>
-<pre>private static final&nbsp;org.apache.commons.logging.Log <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestDisabledWAL.html#line.58">LOG</a></pre>
+<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestDisabledWAL.html#line.59">LOG</a></pre>
 </li>
 </ul>
 <a name="TEST_UTIL">
@@ -264,7 +264,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>TEST_UTIL</h4>
-<pre>static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/HBaseTestingUtility.html" title="class in org.apache.hadoop.hbase">HBaseTestingUtility</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestDisabledWAL.html#line.59">TEST_UTIL</a></pre>
+<pre>static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/HBaseTestingUtility.html" title="class in org.apache.hadoop.hbase">HBaseTestingUtility</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestDisabledWAL.html#line.60">TEST_UTIL</a></pre>
 </li>
 </ul>
 <a name="table">
@@ -273,7 +273,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>table</h4>
-<pre>private&nbsp;org.apache.hadoop.hbase.client.Table <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestDisabledWAL.html#line.60">table</a></pre>
+<pre>private&nbsp;org.apache.hadoop.hbase.client.Table <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestDisabledWAL.html#line.61">table</a></pre>
 </li>
 </ul>
 <a name="tableName">
@@ -282,7 +282,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>tableName</h4>
-<pre>private&nbsp;org.apache.hadoop.hbase.TableName <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestDisabledWAL.html#line.61">tableName</a></pre>
+<pre>private&nbsp;org.apache.hadoop.hbase.TableName <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestDisabledWAL.html#line.62">tableName</a></pre>
 </li>
 </ul>
 <a name="fam">
@@ -291,7 +291,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>fam</h4>
-<pre>private&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestDisabledWAL.html#line.62">fam</a></pre>
+<pre>private&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestDisabledWAL.html#line.63">fam</a></pre>
 </li>
 </ul>
 </li>
@@ -308,7 +308,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>TestDisabledWAL</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestDisabledWAL.html#line.49">TestDisabledWAL</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestDisabledWAL.html#line.50">TestDisabledWAL</a>()</pre>
 </li>
 </ul>
 </li>
@@ -325,7 +325,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>beforeClass</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestDisabledWAL.html#line.65">beforeClass</a>()
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestDisabledWAL.html#line.66">beforeClass</a>()
                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -339,7 +339,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>afterClass</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestDisabledWAL.html#line.77">afterClass</a>()
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestDisabledWAL.html#line.78">afterClass</a>()
                        throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -353,7 +353,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>setup</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestDisabledWAL.html#line.82">setup</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestDisabledWAL.html#line.83">setup</a>()
            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -367,7 +367,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>cleanup</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestDisabledWAL.html#line.89">cleanup</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestDisabledWAL.html#line.90">cleanup</a>()
              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -381,7 +381,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>testDisabledWAL</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestDisabledWAL.html#line.95">testDisabledWAL</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/wal/TestDisabledWAL.html#line.96">testDisabledWAL</a>()
                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>

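These Javadoc deltas mirror a source-level change in TestDisabledWAL: the LOG field switches from Apache commons-logging to slf4j, and every member's source-line anchor shifts down by one because the import block changed. As a hedged illustration of the migration pattern only (the authoritative declarations are in the linked src-html pages), the before/after looks like:

  // before: commons-logging
  import org.apache.commons.logging.Log;
  import org.apache.commons.logging.LogFactory;

  private static final Log LOG = LogFactory.getLog(TestDisabledWAL.class);

  // after: slf4j
  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;

  private static final Logger LOG = LoggerFactory.getLogger(TestDisabledWAL.class);

A side benefit of slf4j is parameterized logging, e.g. LOG.info("created table {}", tableName), which skips message formatting entirely when the level is disabled.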
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/testdevapidocs/src-html/org/apache/hadoop/hbase/TestClusterPortAssignment.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/TestClusterPortAssignment.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/TestClusterPortAssignment.html
index 2adb902..8adac62 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/TestClusterPortAssignment.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/TestClusterPortAssignment.html
@@ -30,62 +30,63 @@
 <span class="sourceLineNo">022</span><a name="line.22"></a>
 <span class="sourceLineNo">023</span>import java.net.BindException;<a name="line.23"></a>
 <span class="sourceLineNo">024</span><a name="line.24"></a>
-<span class="sourceLineNo">025</span>import org.apache.commons.logging.Log;<a name="line.25"></a>
-<span class="sourceLineNo">026</span>import org.apache.commons.logging.LogFactory;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import org.apache.hadoop.conf.Configuration;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.testclassification.MediumTests;<a name="line.28"></a>
-<span class="sourceLineNo">029</span><a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.junit.ClassRule;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.junit.Test;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.junit.experimental.categories.Category;<a name="line.32"></a>
-<span class="sourceLineNo">033</span><a name="line.33"></a>
-<span class="sourceLineNo">034</span>@Category(MediumTests.class)<a name="line.34"></a>
-<span class="sourceLineNo">035</span>public class TestClusterPortAssignment {<a name="line.35"></a>
-<span class="sourceLineNo">036</span>  @ClassRule<a name="line.36"></a>
-<span class="sourceLineNo">037</span>  public static final HBaseClassTestRule CLASS_RULE =<a name="line.37"></a>
-<span class="sourceLineNo">038</span>      HBaseClassTestRule.forClass(TestClusterPortAssignment.class);<a name="line.38"></a>
-<span class="sourceLineNo">039</span><a name="line.39"></a>
-<span class="sourceLineNo">040</span>  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();<a name="line.40"></a>
-<span class="sourceLineNo">041</span>  private static final Log LOG = LogFactory.getLog(TestClusterPortAssignment.class);<a name="line.41"></a>
-<span class="sourceLineNo">042</span><a name="line.42"></a>
-<span class="sourceLineNo">043</span>  /**<a name="line.43"></a>
-<span class="sourceLineNo">044</span>   * Check that we can start an HBase cluster specifying a custom set of<a name="line.44"></a>
-<span class="sourceLineNo">045</span>   * RPC and infoserver ports.<a name="line.45"></a>
-<span class="sourceLineNo">046</span>   */<a name="line.46"></a>
-<span class="sourceLineNo">047</span>  @Test<a name="line.47"></a>
-<span class="sourceLineNo">048</span>  public void testClusterPortAssignment() throws Exception {<a name="line.48"></a>
-<span class="sourceLineNo">049</span>    boolean retry = false;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>    do {<a name="line.50"></a>
-<span class="sourceLineNo">051</span>      int masterPort =  HBaseTestingUtility.randomFreePort();<a name="line.51"></a>
-<span class="sourceLineNo">052</span>      int masterInfoPort =  HBaseTestingUtility.randomFreePort();<a name="line.52"></a>
-<span class="sourceLineNo">053</span>      int rsPort =  HBaseTestingUtility.randomFreePort();<a name="line.53"></a>
-<span class="sourceLineNo">054</span>      int rsInfoPort =  HBaseTestingUtility.randomFreePort();<a name="line.54"></a>
-<span class="sourceLineNo">055</span>      TEST_UTIL.getConfiguration().setBoolean(LocalHBaseCluster.ASSIGN_RANDOM_PORTS, false);<a name="line.55"></a>
-<span class="sourceLineNo">056</span>      TEST_UTIL.getConfiguration().setInt(HConstants.MASTER_PORT, masterPort);<a name="line.56"></a>
-<span class="sourceLineNo">057</span>      TEST_UTIL.getConfiguration().setInt(HConstants.MASTER_INFO_PORT, masterInfoPort);<a name="line.57"></a>
-<span class="sourceLineNo">058</span>      TEST_UTIL.getConfiguration().setInt(HConstants.REGIONSERVER_PORT, rsPort);<a name="line.58"></a>
-<span class="sourceLineNo">059</span>      TEST_UTIL.getConfiguration().setInt(HConstants.REGIONSERVER_INFO_PORT, rsInfoPort);<a name="line.59"></a>
-<span class="sourceLineNo">060</span>      try {<a name="line.60"></a>
-<span class="sourceLineNo">061</span>        MiniHBaseCluster cluster = TEST_UTIL.startMiniCluster();<a name="line.61"></a>
-<span class="sourceLineNo">062</span>        assertTrue("Cluster failed to come up", cluster.waitForActiveAndReadyMaster(30000));<a name="line.62"></a>
-<span class="sourceLineNo">063</span>        retry = false;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>        assertEquals("Master RPC port is incorrect", masterPort,<a name="line.64"></a>
-<span class="sourceLineNo">065</span>          cluster.getMaster().getRpcServer().getListenerAddress().getPort());<a name="line.65"></a>
-<span class="sourceLineNo">066</span>        assertEquals("Master info port is incorrect", masterInfoPort,<a name="line.66"></a>
-<span class="sourceLineNo">067</span>          cluster.getMaster().getInfoServer().getPort());<a name="line.67"></a>
-<span class="sourceLineNo">068</span>        assertEquals("RS RPC port is incorrect", rsPort,<a name="line.68"></a>
-<span class="sourceLineNo">069</span>          cluster.getRegionServer(0).getRpcServer().getListenerAddress().getPort());<a name="line.69"></a>
-<span class="sourceLineNo">070</span>        assertEquals("RS info port is incorrect", rsInfoPort,<a name="line.70"></a>
-<span class="sourceLineNo">071</span>          cluster.getRegionServer(0).getInfoServer().getPort());<a name="line.71"></a>
-<span class="sourceLineNo">072</span>      } catch (BindException e) {<a name="line.72"></a>
-<span class="sourceLineNo">073</span>        LOG.info("Failed to bind, need to retry", e);<a name="line.73"></a>
-<span class="sourceLineNo">074</span>        retry = true;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>      } finally {<a name="line.75"></a>
-<span class="sourceLineNo">076</span>        TEST_UTIL.shutdownMiniCluster();<a name="line.76"></a>
-<span class="sourceLineNo">077</span>      }<a name="line.77"></a>
-<span class="sourceLineNo">078</span>    } while (retry);<a name="line.78"></a>
-<span class="sourceLineNo">079</span>  }<a name="line.79"></a>
-<span class="sourceLineNo">080</span>}<a name="line.80"></a>
+<span class="sourceLineNo">025</span>import org.apache.hadoop.conf.Configuration;<a name="line.25"></a>
+<span class="sourceLineNo">026</span>import org.apache.hadoop.hbase.testclassification.MediumTests;<a name="line.26"></a>
+<span class="sourceLineNo">027</span><a name="line.27"></a>
+<span class="sourceLineNo">028</span>import org.junit.ClassRule;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.junit.Test;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.junit.experimental.categories.Category;<a name="line.30"></a>
+<span class="sourceLineNo">031</span><a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.slf4j.Logger;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.slf4j.LoggerFactory;<a name="line.33"></a>
+<span class="sourceLineNo">034</span><a name="line.34"></a>
+<span class="sourceLineNo">035</span>@Category(MediumTests.class)<a name="line.35"></a>
+<span class="sourceLineNo">036</span>public class TestClusterPortAssignment {<a name="line.36"></a>
+<span class="sourceLineNo">037</span>  @ClassRule<a name="line.37"></a>
+<span class="sourceLineNo">038</span>  public static final HBaseClassTestRule CLASS_RULE =<a name="line.38"></a>
+<span class="sourceLineNo">039</span>      HBaseClassTestRule.forClass(TestClusterPortAssignment.class);<a name="line.39"></a>
+<span class="sourceLineNo">040</span><a name="line.40"></a>
+<span class="sourceLineNo">041</span>  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();<a name="line.41"></a>
+<span class="sourceLineNo">042</span>  private static final Logger LOG = LoggerFactory.getLogger(TestClusterPortAssignment.class);<a name="line.42"></a>
+<span class="sourceLineNo">043</span><a name="line.43"></a>
+<span class="sourceLineNo">044</span>  /**<a name="line.44"></a>
+<span class="sourceLineNo">045</span>   * Check that we can start an HBase cluster specifying a custom set of<a name="line.45"></a>
+<span class="sourceLineNo">046</span>   * RPC and infoserver ports.<a name="line.46"></a>
+<span class="sourceLineNo">047</span>   */<a name="line.47"></a>
+<span class="sourceLineNo">048</span>  @Test<a name="line.48"></a>
+<span class="sourceLineNo">049</span>  public void testClusterPortAssignment() throws Exception {<a name="line.49"></a>
+<span class="sourceLineNo">050</span>    boolean retry = false;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>    do {<a name="line.51"></a>
+<span class="sourceLineNo">052</span>      int masterPort =  HBaseTestingUtility.randomFreePort();<a name="line.52"></a>
+<span class="sourceLineNo">053</span>      int masterInfoPort =  HBaseTestingUtility.randomFreePort();<a name="line.53"></a>
+<span class="sourceLineNo">054</span>      int rsPort =  HBaseTestingUtility.randomFreePort();<a name="line.54"></a>
+<span class="sourceLineNo">055</span>      int rsInfoPort =  HBaseTestingUtility.randomFreePort();<a name="line.55"></a>
+<span class="sourceLineNo">056</span>      TEST_UTIL.getConfiguration().setBoolean(LocalHBaseCluster.ASSIGN_RANDOM_PORTS, false);<a name="line.56"></a>
+<span class="sourceLineNo">057</span>      TEST_UTIL.getConfiguration().setInt(HConstants.MASTER_PORT, masterPort);<a name="line.57"></a>
+<span class="sourceLineNo">058</span>      TEST_UTIL.getConfiguration().setInt(HConstants.MASTER_INFO_PORT, masterInfoPort);<a name="line.58"></a>
+<span class="sourceLineNo">059</span>      TEST_UTIL.getConfiguration().setInt(HConstants.REGIONSERVER_PORT, rsPort);<a name="line.59"></a>
+<span class="sourceLineNo">060</span>      TEST_UTIL.getConfiguration().setInt(HConstants.REGIONSERVER_INFO_PORT, rsInfoPort);<a name="line.60"></a>
+<span class="sourceLineNo">061</span>      try {<a name="line.61"></a>
+<span class="sourceLineNo">062</span>        MiniHBaseCluster cluster = TEST_UTIL.startMiniCluster();<a name="line.62"></a>
+<span class="sourceLineNo">063</span>        assertTrue("Cluster failed to come up", cluster.waitForActiveAndReadyMaster(30000));<a name="line.63"></a>
+<span class="sourceLineNo">064</span>        retry = false;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>        assertEquals("Master RPC port is incorrect", masterPort,<a name="line.65"></a>
+<span class="sourceLineNo">066</span>          cluster.getMaster().getRpcServer().getListenerAddress().getPort());<a name="line.66"></a>
+<span class="sourceLineNo">067</span>        assertEquals("Master info port is incorrect", masterInfoPort,<a name="line.67"></a>
+<span class="sourceLineNo">068</span>          cluster.getMaster().getInfoServer().getPort());<a name="line.68"></a>
+<span class="sourceLineNo">069</span>        assertEquals("RS RPC port is incorrect", rsPort,<a name="line.69"></a>
+<span class="sourceLineNo">070</span>          cluster.getRegionServer(0).getRpcServer().getListenerAddress().getPort());<a name="line.70"></a>
+<span class="sourceLineNo">071</span>        assertEquals("RS info port is incorrect", rsInfoPort,<a name="line.71"></a>
+<span class="sourceLineNo">072</span>          cluster.getRegionServer(0).getInfoServer().getPort());<a name="line.72"></a>
+<span class="sourceLineNo">073</span>      } catch (BindException e) {<a name="line.73"></a>
+<span class="sourceLineNo">074</span>        LOG.info("Failed to bind, need to retry", e);<a name="line.74"></a>
+<span class="sourceLineNo">075</span>        retry = true;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>      } finally {<a name="line.76"></a>
+<span class="sourceLineNo">077</span>        TEST_UTIL.shutdownMiniCluster();<a name="line.77"></a>
+<span class="sourceLineNo">078</span>      }<a name="line.78"></a>
+<span class="sourceLineNo">079</span>    } while (retry);<a name="line.79"></a>
+<span class="sourceLineNo">080</span>  }<a name="line.80"></a>
+<span class="sourceLineNo">081</span>}<a name="line.81"></a>
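
The rewritten test keeps its retry loop because pre-picking "free" ports is inherently racy: another process can bind a port between HBaseTestingUtility.randomFreePort() and startMiniCluster(), which then surfaces as a BindException. A minimal, self-contained sketch of the same retry-on-BindException idea in plain java.net (PortRetryDemo and bindWithRetry are illustrative names, not HBase code):

  import java.io.IOException;
  import java.net.BindException;
  import java.net.ServerSocket;

  class PortRetryDemo {
    static ServerSocket bindWithRetry() throws IOException {
      while (true) {
        int port;
        try (ServerSocket probe = new ServerSocket(0)) {
          port = probe.getLocalPort();    // the port looks free right now
        }
        try {
          return new ServerSocket(port);  // but it can be taken between probe and bind
        } catch (BindException e) {
          // lost the race: loop and probe a fresh port, like the test's do/while
        }
      }
    }
  }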
 
 
 


[14/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
index 7137829..4a879bb 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
@@ -78,124 +78,124 @@
 <span class="sourceLineNo">070</span>import java.util.concurrent.locks.ReadWriteLock;<a name="line.70"></a>
 <span class="sourceLineNo">071</span>import java.util.concurrent.locks.ReentrantReadWriteLock;<a name="line.71"></a>
 <span class="sourceLineNo">072</span>import java.util.function.Function;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.commons.collections.CollectionUtils;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.conf.Configuration;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.fs.FileStatus;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.fs.FileSystem;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.fs.LocatedFileStatus;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.fs.Path;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.Cell;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.CellBuilderType;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.CellComparatorImpl;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.CompoundConfiguration;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.HConstants;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.HConstants.OperationStatusCode;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.92"></a>
-<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.93"></a>
-<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.NamespaceDescriptor;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.97"></a>
-<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.98"></a>
-<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.TableName;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.Tag;<a name="line.100"></a>
-<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.TagUtil;<a name="line.101"></a>
-<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.102"></a>
-<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.client.Append;<a name="line.103"></a>
-<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;<a name="line.104"></a>
-<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.client.CompactionState;<a name="line.105"></a>
-<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.106"></a>
-<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.107"></a>
-<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.client.Get;<a name="line.108"></a>
-<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.109"></a>
-<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.client.IsolationLevel;<a name="line.110"></a>
-<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.111"></a>
-<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;<a name="line.112"></a>
-<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.client.Put;<a name="line.113"></a>
-<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.client.RegionInfoBuilder;<a name="line.115"></a>
-<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.client.Result;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.118"></a>
-<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.119"></a>
-<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.120"></a>
-<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.client.TableDescriptorBuilder;<a name="line.121"></a>
-<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.conf.ConfigurationManager;<a name="line.122"></a>
-<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;<a name="line.123"></a>
-<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;<a name="line.124"></a>
-<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;<a name="line.125"></a>
-<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.126"></a>
-<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.exceptions.TimeoutIOException;<a name="line.127"></a>
-<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.128"></a>
-<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.129"></a>
-<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.filter.FilterWrapper;<a name="line.130"></a>
-<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.filter.IncompatibleFilterException;<a name="line.131"></a>
-<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.132"></a>
-<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.133"></a>
-<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.io.hfile.HFile;<a name="line.135"></a>
-<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.ipc.RpcCall;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.hadoop.hbase.regionserver.throttle.StoreHotnessProtector;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.apache.hadoop.hbase.regionserver.wal.WALUtil;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>import org.apache.hadoop.hbase.security.User;<a name="line.153"></a>
-<span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.154"></a>
-<span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>import org.apache.hadoop.hbase.trace.TraceUtil;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.158"></a>
-<span class="sourceLineNo">159</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>import org.apache.hadoop.hbase.util.CompressionTest;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>import org.apache.hadoop.hbase.util.EncryptionTest;<a name="line.161"></a>
-<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.162"></a>
-<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.util.HashedBytes;<a name="line.164"></a>
-<span class="sourceLineNo">165</span>import org.apache.hadoop.hbase.util.NonceKey;<a name="line.165"></a>
-<span class="sourceLineNo">166</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.166"></a>
-<span class="sourceLineNo">167</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.167"></a>
-<span class="sourceLineNo">168</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.168"></a>
-<span class="sourceLineNo">169</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.169"></a>
-<span class="sourceLineNo">170</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.170"></a>
-<span class="sourceLineNo">171</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.171"></a>
-<span class="sourceLineNo">172</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.172"></a>
-<span class="sourceLineNo">173</span>import org.apache.hadoop.hbase.wal.WALKeyImpl;<a name="line.173"></a>
-<span class="sourceLineNo">174</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.174"></a>
-<span class="sourceLineNo">175</span>import org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;<a name="line.175"></a>
-<span class="sourceLineNo">176</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.176"></a>
-<span class="sourceLineNo">177</span>import org.apache.hadoop.util.StringUtils;<a name="line.177"></a>
-<span class="sourceLineNo">178</span>import org.apache.htrace.core.TraceScope;<a name="line.178"></a>
-<span class="sourceLineNo">179</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.179"></a>
-<span class="sourceLineNo">180</span>import org.slf4j.Logger;<a name="line.180"></a>
-<span class="sourceLineNo">181</span>import org.slf4j.LoggerFactory;<a name="line.181"></a>
-<span class="sourceLineNo">182</span><a name="line.182"></a>
-<span class="sourceLineNo">183</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.183"></a>
-<span class="sourceLineNo">184</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.184"></a>
-<span class="sourceLineNo">185</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.185"></a>
-<span class="sourceLineNo">186</span>import org.apache.hbase.thirdparty.com.google.common.collect.Maps;<a name="line.186"></a>
-<span class="sourceLineNo">187</span>import org.apache.hbase.thirdparty.com.google.common.io.Closeables;<a name="line.187"></a>
-<span class="sourceLineNo">188</span>import org.apache.hbase.thirdparty.com.google.protobuf.Service;<a name="line.188"></a>
-<span class="sourceLineNo">189</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.189"></a>
-<span class="sourceLineNo">190</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.190"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.conf.Configuration;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.fs.FileStatus;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.fs.FileSystem;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.fs.LocatedFileStatus;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.fs.Path;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.Cell;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.CellBuilderType;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.CellComparatorImpl;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.CompoundConfiguration;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.HConstants;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.HConstants.OperationStatusCode;<a name="line.90"></a>
+<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.NamespaceDescriptor;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.97"></a>
+<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.TableName;<a name="line.98"></a>
+<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.Tag;<a name="line.99"></a>
+<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.TagUtil;<a name="line.100"></a>
+<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.101"></a>
+<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.client.Append;<a name="line.102"></a>
+<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;<a name="line.103"></a>
+<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.client.CompactionState;<a name="line.104"></a>
+<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.105"></a>
+<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.106"></a>
+<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.client.Get;<a name="line.107"></a>
+<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.108"></a>
+<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.client.IsolationLevel;<a name="line.109"></a>
+<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.110"></a>
+<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;<a name="line.111"></a>
+<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.client.Put;<a name="line.112"></a>
+<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.113"></a>
+<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.client.RegionInfoBuilder;<a name="line.114"></a>
+<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.115"></a>
+<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.client.Result;<a name="line.116"></a>
+<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.117"></a>
+<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.118"></a>
+<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.119"></a>
+<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.client.TableDescriptorBuilder;<a name="line.120"></a>
+<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.conf.ConfigurationManager;<a name="line.121"></a>
+<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;<a name="line.122"></a>
+<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;<a name="line.124"></a>
+<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.exceptions.TimeoutIOException;<a name="line.126"></a>
+<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.127"></a>
+<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.128"></a>
+<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.filter.FilterWrapper;<a name="line.129"></a>
+<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.filter.IncompatibleFilterException;<a name="line.130"></a>
+<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.131"></a>
+<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.132"></a>
+<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.133"></a>
+<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.io.hfile.HFile;<a name="line.134"></a>
+<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;<a name="line.135"></a>
+<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;<a name="line.136"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.ipc.RpcCall;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.apache.hadoop.hbase.regionserver.throttle.StoreHotnessProtector;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>import org.apache.hadoop.hbase.regionserver.wal.WALUtil;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hadoop.hbase.security.User;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.trace.TraceUtil;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.156"></a>
+<span class="sourceLineNo">157</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.157"></a>
+<span class="sourceLineNo">158</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>import org.apache.hadoop.hbase.util.CompressionTest;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>import org.apache.hadoop.hbase.util.EncryptionTest;<a name="line.160"></a>
+<span class="sourceLineNo">161</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.161"></a>
+<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.162"></a>
+<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.util.HashedBytes;<a name="line.163"></a>
+<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.util.NonceKey;<a name="line.164"></a>
+<span class="sourceLineNo">165</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.165"></a>
+<span class="sourceLineNo">166</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.166"></a>
+<span class="sourceLineNo">167</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.167"></a>
+<span class="sourceLineNo">168</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.168"></a>
+<span class="sourceLineNo">169</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.169"></a>
+<span class="sourceLineNo">170</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.170"></a>
+<span class="sourceLineNo">171</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.171"></a>
+<span class="sourceLineNo">172</span>import org.apache.hadoop.hbase.wal.WALKeyImpl;<a name="line.172"></a>
+<span class="sourceLineNo">173</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.173"></a>
+<span class="sourceLineNo">174</span>import org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;<a name="line.174"></a>
+<span class="sourceLineNo">175</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.175"></a>
+<span class="sourceLineNo">176</span>import org.apache.hadoop.util.StringUtils;<a name="line.176"></a>
+<span class="sourceLineNo">177</span>import org.apache.htrace.core.TraceScope;<a name="line.177"></a>
+<span class="sourceLineNo">178</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.178"></a>
+<span class="sourceLineNo">179</span>import org.slf4j.Logger;<a name="line.179"></a>
+<span class="sourceLineNo">180</span>import org.slf4j.LoggerFactory;<a name="line.180"></a>
+<span class="sourceLineNo">181</span><a name="line.181"></a>
+<span class="sourceLineNo">182</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.182"></a>
+<span class="sourceLineNo">183</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.183"></a>
+<span class="sourceLineNo">184</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.184"></a>
+<span class="sourceLineNo">185</span>import org.apache.hbase.thirdparty.com.google.common.collect.Maps;<a name="line.185"></a>
+<span class="sourceLineNo">186</span>import org.apache.hbase.thirdparty.com.google.common.io.Closeables;<a name="line.186"></a>
+<span class="sourceLineNo">187</span>import org.apache.hbase.thirdparty.com.google.protobuf.Service;<a name="line.187"></a>
+<span class="sourceLineNo">188</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.188"></a>
+<span class="sourceLineNo">189</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.189"></a>
+<span class="sourceLineNo">190</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.190"></a>
 <span class="sourceLineNo">191</span><a name="line.191"></a>
 <span class="sourceLineNo">192</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.192"></a>
 <span class="sourceLineNo">193</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;<a name="line.193"></a>

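The last changed import in this hunk (new source line 190) swaps org.apache.commons.collections.CollectionUtils for the commons-collections4 copy that hbase-thirdparty relocates under org.apache.hbase.thirdparty. The class name is unchanged, so call sites compile as-is; only the import moves. A small sketch, assuming the hbase-thirdparty jar is on the classpath (ShadedCollectionsDemo and anyPending are illustrative):

  import java.util.Collections;
  import java.util.List;

  import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;

  class ShadedCollectionsDemo {
    static boolean anyPending(List<String> pending) {
      // isNotEmpty exists in both collections 3.x and 4.x, so callers need no rewrite
      return CollectionUtils.isNotEmpty(pending);
    }

    public static void main(String[] args) {
      System.out.println(anyPending(Collections.emptyList()));  // prints false
    }
  }

Relocating the dependency keeps HBase's commons-collections4 off the application classpath, so downstream code can pull in its own version without conflicts.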
+<span class="sourceLineNo">073</span>import org.apache.hadoop.conf.Configuration;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.fs.FileStatus;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.fs.FileSystem;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.fs.LocatedFileStatus;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.fs.Path;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.Cell;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.CellBuilderType;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.CellComparatorImpl;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.CompoundConfiguration;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.HConstants;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.HConstants.OperationStatusCode;<a name="line.90"></a>
+<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.NamespaceDescriptor;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.97"></a>
+<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.TableName;<a name="line.98"></a>
+<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.Tag;<a name="line.99"></a>
+<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.TagUtil;<a name="line.100"></a>
+<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.101"></a>
+<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.client.Append;<a name="line.102"></a>
+<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;<a name="line.103"></a>
+<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.client.CompactionState;<a name="line.104"></a>
+<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.105"></a>
+<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.106"></a>
+<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.client.Get;<a name="line.107"></a>
+<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.108"></a>
+<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.client.IsolationLevel;<a name="line.109"></a>
+<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.110"></a>
+<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;<a name="line.111"></a>
+<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.client.Put;<a name="line.112"></a>
+<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.113"></a>
+<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.client.RegionInfoBuilder;<a name="line.114"></a>
+<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.115"></a>
+<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.client.Result;<a name="line.116"></a>
+<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.117"></a>
+<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.118"></a>
+<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.119"></a>
+<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.client.TableDescriptorBuilder;<a name="line.120"></a>
+<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.conf.ConfigurationManager;<a name="line.121"></a>
+<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;<a name="line.122"></a>
+<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;<a name="line.124"></a>
+<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.exceptions.TimeoutIOException;<a name="line.126"></a>
+<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.127"></a>
+<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.128"></a>
+<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.filter.FilterWrapper;<a name="line.129"></a>
+<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.filter.IncompatibleFilterException;<a name="line.130"></a>
+<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.131"></a>
+<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.132"></a>
+<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.133"></a>
+<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.io.hfile.HFile;<a name="line.134"></a>
+<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;<a name="line.135"></a>
+<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;<a name="line.136"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.ipc.RpcCall;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.apache.hadoop.hbase.regionserver.throttle.StoreHotnessProtector;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>import org.apache.hadoop.hbase.regionserver.wal.WALUtil;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hadoop.hbase.security.User;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.trace.TraceUtil;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.156"></a>
+<span class="sourceLineNo">157</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.157"></a>
+<span class="sourceLineNo">158</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>import org.apache.hadoop.hbase.util.CompressionTest;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>import org.apache.hadoop.hbase.util.EncryptionTest;<a name="line.160"></a>
+<span class="sourceLineNo">161</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.161"></a>
+<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.162"></a>
+<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.util.HashedBytes;<a name="line.163"></a>
+<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.util.NonceKey;<a name="line.164"></a>
+<span class="sourceLineNo">165</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.165"></a>
+<span class="sourceLineNo">166</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.166"></a>
+<span class="sourceLineNo">167</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.167"></a>
+<span class="sourceLineNo">168</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.168"></a>
+<span class="sourceLineNo">169</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.169"></a>
+<span class="sourceLineNo">170</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.170"></a>
+<span class="sourceLineNo">171</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.171"></a>
+<span class="sourceLineNo">172</span>import org.apache.hadoop.hbase.wal.WALKeyImpl;<a name="line.172"></a>
+<span class="sourceLineNo">173</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.173"></a>
+<span class="sourceLineNo">174</span>import org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;<a name="line.174"></a>
+<span class="sourceLineNo">175</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.175"></a>
+<span class="sourceLineNo">176</span>import org.apache.hadoop.util.StringUtils;<a name="line.176"></a>
+<span class="sourceLineNo">177</span>import org.apache.htrace.core.TraceScope;<a name="line.177"></a>
+<span class="sourceLineNo">178</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.178"></a>
+<span class="sourceLineNo">179</span>import org.slf4j.Logger;<a name="line.179"></a>
+<span class="sourceLineNo">180</span>import org.slf4j.LoggerFactory;<a name="line.180"></a>
+<span class="sourceLineNo">181</span><a name="line.181"></a>
+<span class="sourceLineNo">182</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.182"></a>
+<span class="sourceLineNo">183</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.183"></a>
+<span class="sourceLineNo">184</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.184"></a>
+<span class="sourceLineNo">185</span>import org.apache.hbase.thirdparty.com.google.common.collect.Maps;<a name="line.185"></a>
+<span class="sourceLineNo">186</span>import org.apache.hbase.thirdparty.com.google.common.io.Closeables;<a name="line.186"></a>
+<span class="sourceLineNo">187</span>import org.apache.hbase.thirdparty.com.google.protobuf.Service;<a name="line.187"></a>
+<span class="sourceLineNo">188</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.188"></a>
+<span class="sourceLineNo">189</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.189"></a>
+<span class="sourceLineNo">190</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.190"></a>
 <span class="sourceLineNo">191</span><a name="line.191"></a>
 <span class="sourceLineNo">192</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.192"></a>
 <span class="sourceLineNo">193</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;<a name="line.193"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html
index 7137829..4a879bb 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html
@@ -78,124 +78,124 @@
 <span class="sourceLineNo">070</span>import java.util.concurrent.locks.ReadWriteLock;<a name="line.70"></a>
 <span class="sourceLineNo">071</span>import java.util.concurrent.locks.ReentrantReadWriteLock;<a name="line.71"></a>
 <span class="sourceLineNo">072</span>import java.util.function.Function;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.commons.collections.CollectionUtils;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.conf.Configuration;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.fs.FileStatus;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.fs.FileSystem;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.fs.LocatedFileStatus;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.fs.Path;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.Cell;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.CellBuilderType;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.CellComparatorImpl;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.CompoundConfiguration;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.HConstants;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.HConstants.OperationStatusCode;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.92"></a>
-<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.93"></a>
-<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.NamespaceDescriptor;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.97"></a>
-<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.98"></a>
-<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.TableName;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.Tag;<a name="line.100"></a>
-<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.TagUtil;<a name="line.101"></a>
-<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.102"></a>
-<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.client.Append;<a name="line.103"></a>
-<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;<a name="line.104"></a>
-<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.client.CompactionState;<a name="line.105"></a>
-<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.106"></a>
-<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.107"></a>
-<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.client.Get;<a name="line.108"></a>
-<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.109"></a>
-<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.client.IsolationLevel;<a name="line.110"></a>
-<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.111"></a>
-<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;<a name="line.112"></a>
-<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.client.Put;<a name="line.113"></a>
-<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.client.RegionInfoBuilder;<a name="line.115"></a>
-<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.client.Result;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.118"></a>
-<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.119"></a>
-<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.120"></a>
-<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.client.TableDescriptorBuilder;<a name="line.121"></a>
-<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.conf.ConfigurationManager;<a name="line.122"></a>
-<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;<a name="line.123"></a>
-<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;<a name="line.124"></a>
-<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;<a name="line.125"></a>
-<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.126"></a>
-<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.exceptions.TimeoutIOException;<a name="line.127"></a>
-<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.128"></a>
-<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.129"></a>
-<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.filter.FilterWrapper;<a name="line.130"></a>
-<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.filter.IncompatibleFilterException;<a name="line.131"></a>
-<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.132"></a>
-<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.133"></a>
-<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.io.hfile.HFile;<a name="line.135"></a>
-<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.ipc.RpcCall;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.hadoop.hbase.regionserver.throttle.StoreHotnessProtector;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.apache.hadoop.hbase.regionserver.wal.WALUtil;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>import org.apache.hadoop.hbase.security.User;<a name="line.153"></a>
-<span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.154"></a>
-<span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>import org.apache.hadoop.hbase.trace.TraceUtil;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.158"></a>
-<span class="sourceLineNo">159</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>import org.apache.hadoop.hbase.util.CompressionTest;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>import org.apache.hadoop.hbase.util.EncryptionTest;<a name="line.161"></a>
-<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.162"></a>
-<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.util.HashedBytes;<a name="line.164"></a>
-<span class="sourceLineNo">165</span>import org.apache.hadoop.hbase.util.NonceKey;<a name="line.165"></a>
-<span class="sourceLineNo">166</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.166"></a>
-<span class="sourceLineNo">167</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.167"></a>
-<span class="sourceLineNo">168</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.168"></a>
-<span class="sourceLineNo">169</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.169"></a>
-<span class="sourceLineNo">170</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.170"></a>
-<span class="sourceLineNo">171</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.171"></a>
-<span class="sourceLineNo">172</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.172"></a>
-<span class="sourceLineNo">173</span>import org.apache.hadoop.hbase.wal.WALKeyImpl;<a name="line.173"></a>
-<span class="sourceLineNo">174</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.174"></a>
-<span class="sourceLineNo">175</span>import org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;<a name="line.175"></a>
-<span class="sourceLineNo">176</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.176"></a>
-<span class="sourceLineNo">177</span>import org.apache.hadoop.util.StringUtils;<a name="line.177"></a>
-<span class="sourceLineNo">178</span>import org.apache.htrace.core.TraceScope;<a name="line.178"></a>
-<span class="sourceLineNo">179</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.179"></a>
-<span class="sourceLineNo">180</span>import org.slf4j.Logger;<a name="line.180"></a>
-<span class="sourceLineNo">181</span>import org.slf4j.LoggerFactory;<a name="line.181"></a>
-<span class="sourceLineNo">182</span><a name="line.182"></a>
-<span class="sourceLineNo">183</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.183"></a>
-<span class="sourceLineNo">184</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.184"></a>
-<span class="sourceLineNo">185</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.185"></a>
-<span class="sourceLineNo">186</span>import org.apache.hbase.thirdparty.com.google.common.collect.Maps;<a name="line.186"></a>
-<span class="sourceLineNo">187</span>import org.apache.hbase.thirdparty.com.google.common.io.Closeables;<a name="line.187"></a>
-<span class="sourceLineNo">188</span>import org.apache.hbase.thirdparty.com.google.protobuf.Service;<a name="line.188"></a>
-<span class="sourceLineNo">189</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.189"></a>
-<span class="sourceLineNo">190</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.190"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.conf.Configuration;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.fs.FileStatus;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.fs.FileSystem;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.fs.LocatedFileStatus;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.fs.Path;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.Cell;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.CellBuilderType;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.CellComparatorImpl;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.CompoundConfiguration;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.HConstants;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.HConstants.OperationStatusCode;<a name="line.90"></a>
+<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.NamespaceDescriptor;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.97"></a>
+<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.TableName;<a name="line.98"></a>
+<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.Tag;<a name="line.99"></a>
+<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.TagUtil;<a name="line.100"></a>
+<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.101"></a>
+<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.client.Append;<a name="line.102"></a>
+<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;<a name="line.103"></a>
+<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.client.CompactionState;<a name="line.104"></a>
+<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.105"></a>
+<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.106"></a>
+<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.client.Get;<a name="line.107"></a>
+<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.108"></a>
+<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.client.IsolationLevel;<a name="line.109"></a>
+<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.110"></a>
+<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;<a name="line.111"></a>
+<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.client.Put;<a name="line.112"></a>
+<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.113"></a>
+<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.client.RegionInfoBuilder;<a name="line.114"></a>
+<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.115"></a>
+<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.client.Result;<a name="line.116"></a>
+<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.117"></a>
+<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.118"></a>
+<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.119"></a>
+<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.client.TableDescriptorBuilder;<a name="line.120"></a>
+<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.conf.ConfigurationManager;<a name="line.121"></a>
+<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;<a name="line.122"></a>
+<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;<a name="line.124"></a>
+<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.exceptions.TimeoutIOException;<a name="line.126"></a>
+<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.127"></a>
+<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.128"></a>
+<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.filter.FilterWrapper;<a name="line.129"></a>
+<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.filter.IncompatibleFilterException;<a name="line.130"></a>
+<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.131"></a>
+<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.132"></a>
+<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.133"></a>
+<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.io.hfile.HFile;<a name="line.134"></a>
+<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;<a name="line.135"></a>
+<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;<a name="line.136"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.ipc.RpcCall;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.apache.hadoop.hbase.regionserver.throttle.StoreHotnessProtector;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>import org.apache.hadoop.hbase.regionserver.wal.WALUtil;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hadoop.hbase.security.User;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.trace.TraceUtil;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.156"></a>
+<span class="sourceLineNo">157</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.157"></a>
+<span class="sourceLineNo">158</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>import org.apache.hadoop.hbase.util.CompressionTest;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>import org.apache.hadoop.hbase.util.EncryptionTest;<a name="line.160"></a>
+<span class="sourceLineNo">161</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.161"></a>
+<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.162"></a>
+<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.util.HashedBytes;<a name="line.163"></a>
+<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.util.NonceKey;<a name="line.164"></a>
+<span class="sourceLineNo">165</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.165"></a>
+<span class="sourceLineNo">166</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.166"></a>
+<span class="sourceLineNo">167</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.167"></a>
+<span class="sourceLineNo">168</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.168"></a>
+<span class="sourceLineNo">169</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.169"></a>
+<span class="sourceLineNo">170</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.170"></a>
+<span class="sourceLineNo">171</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.171"></a>
+<span class="sourceLineNo">172</span>import org.apache.hadoop.hbase.wal.WALKeyImpl;<a name="line.172"></a>
+<span class="sourceLineNo">173</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.173"></a>
+<span class="sourceLineNo">174</span>import org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;<a name="line.174"></a>
+<span class="sourceLineNo">175</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.175"></a>
+<span class="sourceLineNo">176</span>import org.apache.hadoop.util.StringUtils;<a name="line.176"></a>
+<span class="sourceLineNo">177</span>import org.apache.htrace.core.TraceScope;<a name="line.177"></a>
+<span class="sourceLineNo">178</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.178"></a>
+<span class="sourceLineNo">179</span>import org.slf4j.Logger;<a name="line.179"></a>
+<span class="sourceLineNo">180</span>import org.slf4j.LoggerFactory;<a name="line.180"></a>
+<span class="sourceLineNo">181</span><a name="line.181"></a>
+<span class="sourceLineNo">182</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.182"></a>
+<span class="sourceLineNo">183</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.183"></a>
+<span class="sourceLineNo">184</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.184"></a>
+<span class="sourceLineNo">185</span>import org.apache.hbase.thirdparty.com.google.common.collect.Maps;<a name="line.185"></a>
+<span class="sourceLineNo">186</span>import org.apache.hbase.thirdparty.com.google.common.io.Closeables;<a name="line.186"></a>
+<span class="sourceLineNo">187</span>import org.apache.hbase.thirdparty.com.google.protobuf.Service;<a name="line.187"></a>
+<span class="sourceLineNo">188</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.188"></a>
+<span class="sourceLineNo">189</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.189"></a>
+<span class="sourceLineNo">190</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.190"></a>
 <span class="sourceLineNo">191</span><a name="line.191"></a>
 <span class="sourceLineNo">192</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.192"></a>
 <span class="sourceLineNo">193</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;<a name="line.193"></a>


[24/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/checkstyle-aggregate.html
----------------------------------------------------------------------
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 86842ab..67a62c1 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180419" />
+    <meta name="Date-Revision-yyyymmdd" content="20180420" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Checkstyle Results</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -18536,7 +18536,7 @@
 <td>imports</td>
 <td>UnusedImports</td>
 <td>Unused import - org.apache.hadoop.conf.Configuration.</td>
-<td>27</td></tr></table></div>
+<td>25</td></tr></table></div>
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.TestFSTableDescriptorForceCreation.java">org/apache/hadoop/hbase/TestFSTableDescriptorForceCreation.java</h3>
 <table border="0" class="table table-striped">
@@ -27464,7 +27464,7 @@
 <td>blocks</td>
 <td>NeedBraces</td>
 <td>'if' construct must use '{}'s.</td>
-<td>146</td></tr></table></div>
+<td>147</td></tr></table></div>
 <div class="section">
 <h3 id="org.apache.hadoop.hbase.client.RpcRetryingCaller.java">org/apache/hadoop/hbase/client/RpcRetryingCaller.java</h3>
 <table border="0" class="table table-striped">
@@ -86249,7 +86249,7 @@
 <td>imports</td>
 <td>ImportOrder</td>
 <td>Wrong order for 'org.apache.hadoop.hbase.KeyValue' import.</td>
-<td>37</td></tr>
+<td>36</td></tr>
 <tr class="b">
 <td><img src="images/icon_error_sml.gif" alt="" />&#160;Error</td>
 <td>indentation</td>
@@ -123417,7 +123417,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-04-19</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-04-20</li>
             </p>
                 </div>
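
The NeedBraces row above (count moving from 146 to 147) refers to the checkstyle rule quoted verbatim in the table: 'if' construct must use '{}'s. A hypothetical illustration of what the checker counts — this snippet is invented, not the flagged HBase code:

    public class NeedBracesExample {
      static int clamp(int v) {
        if (v < 0) return 0;   // flagged: 'if' construct must use '{}'s.
        if (v > 100) {         // compliant: body wrapped in braces
          return 100;
        }
        return v;
      }
    }

The UnusedImports row earlier (27 down to 25) counts import statements the file no longer references, consistent with the import cleanup shown in the src-html diffs above.
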
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/coc.html
----------------------------------------------------------------------
diff --git a/coc.html b/coc.html
index c11a645..0144c87 100644
--- a/coc.html
+++ b/coc.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180419" />
+    <meta name="Date-Revision-yyyymmdd" content="20180420" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; 
       Code of Conduct Policy
@@ -375,7 +375,7 @@ email to <a class="externalLink" href="mailto:private@hbase.apache.org">the priv
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-04-19</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-04-20</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/dependencies.html
----------------------------------------------------------------------
diff --git a/dependencies.html b/dependencies.html
index 917c2c5..461e974 100644
--- a/dependencies.html
+++ b/dependencies.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180419" />
+    <meta name="Date-Revision-yyyymmdd" content="20180420" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Dependencies</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -440,7 +440,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-04-19</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-04-20</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/dependency-convergence.html
----------------------------------------------------------------------
diff --git a/dependency-convergence.html b/dependency-convergence.html
index 4af4e18..76564c8 100644
--- a/dependency-convergence.html
+++ b/dependency-convergence.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180419" />
+    <meta name="Date-Revision-yyyymmdd" content="20180420" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Reactor Dependency Convergence</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -1105,7 +1105,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-04-19</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-04-20</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/dependency-info.html
----------------------------------------------------------------------
diff --git a/dependency-info.html b/dependency-info.html
index 0c1d73a..6d9a8c7 100644
--- a/dependency-info.html
+++ b/dependency-info.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180419" />
+    <meta name="Date-Revision-yyyymmdd" content="20180420" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Dependency Information</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -313,7 +313,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-04-19</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-04-20</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/dependency-management.html
----------------------------------------------------------------------
diff --git a/dependency-management.html b/dependency-management.html
index 7806822..fa99e3e 100644
--- a/dependency-management.html
+++ b/dependency-management.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180419" />
+    <meta name="Date-Revision-yyyymmdd" content="20180420" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Dependency Management</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -969,7 +969,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-04-19</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-04-20</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/constant-values.html
----------------------------------------------------------------------
diff --git a/devapidocs/constant-values.html b/devapidocs/constant-values.html
index d4d265b..60dc3eb 100644
--- a/devapidocs/constant-values.html
+++ b/devapidocs/constant-values.html
@@ -3768,21 +3768,21 @@
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/Version.html#date">date</a></code></td>
-<td class="colLast"><code>"Thu Apr 19 14:39:00 UTC 2018"</code></td>
+<td class="colLast"><code>"Fri Apr 20 14:39:14 UTC 2018"</code></td>
 </tr>
 <tr class="rowColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.Version.revision">
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/Version.html#revision">revision</a></code></td>
-<td class="colLast"><code>"556b22374423ff087c0583d02ae4298d4d4f2e6b"</code></td>
+<td class="colLast"><code>"914de1141699142bce1486468a742233d9440b23"</code></td>
 </tr>
 <tr class="altColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.Version.srcChecksum">
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/Version.html#srcChecksum">srcChecksum</a></code></td>
-<td class="colLast"><code>"83ef0b63e39df660933d8e09ab06a005"</code></td>
+<td class="colLast"><code>"8a7d9057695428a69e4cd5d02ff0686c"</code></td>
 </tr>
 <tr class="rowColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.Version.url">

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html b/devapidocs/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html
index 8841b7e..5281f72 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html
@@ -129,7 +129,7 @@ var activeTableTab = "activeTableTab";
 <hr>
 <br>
 <pre>@InterfaceAudience.LimitedPrivate(value="Configuration")
-public class <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html#line.50">BackupLogCleaner</a>
+public class <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html#line.51">BackupLogCleaner</a>
 extends <a href="../../../../../../org/apache/hadoop/hbase/master/cleaner/BaseLogCleanerDelegate.html" title="class in org.apache.hadoop.hbase.master.cleaner">BaseLogCleanerDelegate</a></pre>
 <div class="block">Implementation of a log cleaner that checks if a log is still scheduled for incremental backup
  before deleting it when its TTL is over.</div>
@@ -277,7 +277,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/master/cleaner/BaseLo
 <ul class="blockList">
 <li class="blockList">
 <h4>LOG</h4>
-<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html#line.51">LOG</a></pre>
+<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html#line.52">LOG</a></pre>
 </li>
 </ul>
 <a name="stopped">
@@ -286,7 +286,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/master/cleaner/BaseLo
 <ul class="blockList">
 <li class="blockList">
 <h4>stopped</h4>
-<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html#line.53">stopped</a></pre>
+<pre>private&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html#line.54">stopped</a></pre>
 </li>
 </ul>
 <a name="conn">
@@ -295,7 +295,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/master/cleaner/BaseLo
 <ul class="blockListLast">
 <li class="blockList">
 <h4>conn</h4>
-<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html#line.54">conn</a></pre>
+<pre>private&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html#line.55">conn</a></pre>
 </li>
 </ul>
 </li>
@@ -312,7 +312,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/master/cleaner/BaseLo
 <ul class="blockListLast">
 <li class="blockList">
 <h4>BackupLogCleaner</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html#line.56">BackupLogCleaner</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html#line.57">BackupLogCleaner</a>()</pre>
 </li>
 </ul>
 </li>
@@ -329,7 +329,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/master/cleaner/BaseLo
 <ul class="blockList">
 <li class="blockList">
 <h4>init</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html#line.60">init</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&gt;&nbsp;params)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html#line.61">init</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&gt;&nbsp;params)</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/master/cleaner/FileCleanerDelegate.html#init-java.util.Map-">FileCleanerDelegate</a></code></span></div>
 <div class="block">this method is used to pass some instance into subclass</div>
 <dl>
@@ -346,7 +346,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/master/cleaner/BaseLo
 <ul class="blockList">
 <li class="blockList">
 <h4>getDeletableFiles</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Iterable.html?is-external=true" title="class or interface in java.lang">Iterable</a>&lt;org.apache.hadoop.fs.FileStatus&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html#line.79">getDeletableFiles</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Iterable.html?is-external=true" title="class or interface in java.lang">Iterable</a>&lt;org.apache.hadoop.fs.FileStatus&gt;&nbsp;files)</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Iterable.html?is-external=true" title="class or interface in java.lang">Iterable</a>&lt;org.apache.hadoop.fs.FileStatus&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html#line.80">getDeletableFiles</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Iterable.html?is-external=true" title="class or interface in java.lang">Iterable</a>&lt;org.apache.hadoop.fs.FileStatus&gt;&nbsp;files)</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/master/cleaner/FileCleanerDelegate.html#getDeletableFiles-java.lang.Iterable-">FileCleanerDelegate</a></code></span></div>
 <div class="block">Determines which of the given files are safe to delete</div>
 <dl>
@@ -367,7 +367,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/master/cleaner/BaseLo
 <ul class="blockList">
 <li class="blockList">
 <h4>setConf</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html#line.122">setConf</a>(org.apache.hadoop.conf.Configuration&nbsp;config)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html#line.123">setConf</a>(org.apache.hadoop.conf.Configuration&nbsp;config)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code>setConf</code>&nbsp;in interface&nbsp;<code>org.apache.hadoop.conf.Configurable</code></dd>
@@ -382,7 +382,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/master/cleaner/BaseLo
 <ul class="blockList">
 <li class="blockList">
 <h4>stop</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html#line.132">stop</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;why)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html#line.133">stop</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;why)</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/Stoppable.html#stop-java.lang.String-">Stoppable</a></code></span></div>
 <div class="block">Stop this service.
  Implementers should favor logging errors over throwing RuntimeExceptions.</div>
@@ -398,7 +398,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/master/cleaner/BaseLo
 <ul class="blockListLast">
 <li class="blockList">
 <h4>isStopped</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html#line.140">isStopped</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html#line.141">isStopped</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>True if <a href="../../../../../../org/apache/hadoop/hbase/Stoppable.html#stop-java.lang.String-"><code>Stoppable.stop(String)</code></a> has been called.</dd>
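
For readers following the BackupLogCleaner page above: the methods whose source anchors shifted in this commit (init, getDeletableFiles, setConf, stop, isStopped) are essentially the whole surface a log-cleaner delegate implements. Below is a minimal sketch of a delegate with the same shape, purely illustrative and not part of this commit: the class name AgeBasedLogCleaner and the configuration key are made up, and it uses isLogDeletable, the usual extension point for BaseLogCleanerDelegate subclasses, rather than BackupLogCleaner's backup-aware selection logic.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.hbase.master.cleaner.BaseLogCleanerDelegate;

    /**
     * Illustrative sketch only, not part of this commit: a delegate that
     * keeps WALs until they reach a fixed age, then lets them be deleted.
     */
    public class AgeBasedLogCleaner extends BaseLogCleanerDelegate {
      // "demo.wal.cleaner.min.age.ms" is a hypothetical key for this sketch.
      private long minAgeMs = 3600_000L;
      private volatile boolean stopped = false;

      @Override
      public void setConf(Configuration conf) {
        super.setConf(conf);
        this.minAgeMs = conf.getLong("demo.wal.cleaner.min.age.ms", minAgeMs);
      }

      @Override
      public boolean isLogDeletable(FileStatus fStat) {
        // A WAL is considered safe to delete once it is older than minAgeMs.
        return System.currentTimeMillis() - fStat.getModificationTime() > minAgeMs;
      }

      @Override
      public void stop(String why) {
        this.stopped = true;
      }

      @Override
      public boolean isStopped() {
        return stopped;
      }
    }

A delegate like this is wired in by adding its fully qualified class name to hbase.master.logcleaner.plugins, the same cleaner chain BackupLogCleaner participates in.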

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html b/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
index 249def3..a9ef737 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
@@ -167,10 +167,10 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupInfo.BackupState.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupInfo.BackupState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupRestoreConstants.BackupCommand.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupRestoreConstants.BackupCommand</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupInfo.BackupPhase.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupInfo.BackupPhase</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupInfo.BackupState.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupInfo.BackupState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupType.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupInfo.BackupPhase.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupInfo.BackupPhase</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/org/apache/hadoop/hbase/client/RowMutations.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/client/RowMutations.html b/devapidocs/org/apache/hadoop/hbase/client/RowMutations.html
index abbdd77..c61c54b 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/RowMutations.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/RowMutations.html
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 <hr>
 <br>
 <pre>@InterfaceAudience.Public
-public class <a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.41">RowMutations</a>
+public class <a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.42">RowMutations</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="../../../../../org/apache/hadoop/hbase/client/Row.html" title="interface in org.apache.hadoop.hbase.client">Row</a></pre>
 <div class="block">Performs multiple mutations atomically on a single row.
@@ -299,7 +299,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/client/Row.html" titl
 <ul class="blockList">
 <li class="blockList">
 <h4>mutations</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/Mutation.html" title="class in org.apache.hadoop.hbase.client">Mutation</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.57">mutations</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/Mutation.html" title="class in org.apache.hadoop.hbase.client">Mutation</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.58">mutations</a></pre>
 </li>
 </ul>
 <a name="row">
@@ -308,7 +308,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/client/Row.html" titl
 <ul class="blockListLast">
 <li class="blockList">
 <h4>row</h4>
-<pre>private final&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.58">row</a></pre>
+<pre>private final&nbsp;byte[] <a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.59">row</a></pre>
 </li>
 </ul>
 </li>
@@ -325,7 +325,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/client/Row.html" titl
 <ul class="blockList">
 <li class="blockList">
 <h4>RowMutations</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.60">RowMutations</a>(byte[]&nbsp;row)</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.61">RowMutations</a>(byte[]&nbsp;row)</pre>
 </li>
 </ul>
 <a name="RowMutations-byte:A-int-">
@@ -334,7 +334,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/client/Row.html" titl
 <ul class="blockListLast">
 <li class="blockList">
 <h4>RowMutations</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.68">RowMutations</a>(byte[]&nbsp;row,
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.69">RowMutations</a>(byte[]&nbsp;row,
                     int&nbsp;initialCapacity)</pre>
 <div class="block">Create an atomic mutation for the specified row.</div>
 <dl>
@@ -358,7 +358,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/client/Row.html" titl
 <ul class="blockList">
 <li class="blockList">
 <h4>of</h4>
-<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/RowMutations.html" title="class in org.apache.hadoop.hbase.client">RowMutations</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.49">of</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;? extends <a href="../../../../../org/apache/hadoop/hbase/client/Mutation.html" title="class in org.apache.hadoop.hbase.client">Mutation</a>&gt;&nbsp;mutations)
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/RowMutations.html" title="class in org.apache.hadoop.hbase.client">RowMutations</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.50">of</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;? extends <a href="../../../../../org/apache/hadoop/hbase/client/Mutation.html" title="class in org.apache.hadoop.hbase.client">Mutation</a>&gt;&nbsp;mutations)
                        throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Create a <a href="../../../../../org/apache/hadoop/hbase/client/RowMutations.html" title="class in org.apache.hadoop.hbase.client"><code>RowMutations</code></a> with the specified mutations.</div>
 <dl>
@@ -378,7 +378,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/client/Row.html" titl
 <li class="blockList">
 <h4>add</h4>
 <pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true" title="class or interface in java.lang">@Deprecated</a>
-public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.85">add</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Put.html" title="class in org.apache.hadoop.hbase.client">Put</a>&nbsp;p)
+public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.86">add</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Put.html" title="class in org.apache.hadoop.hbase.client">Put</a>&nbsp;p)
                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="deprecatedLabel">Deprecated.</span>&nbsp;<span class="deprecationComment">since 2.0 version and will be removed in 3.0 version.
              use <a href="../../../../../org/apache/hadoop/hbase/client/RowMutations.html#add-org.apache.hadoop.hbase.client.Mutation-"><code>add(Mutation)</code></a></span></div>
@@ -398,7 +398,7 @@ public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/c
 <li class="blockList">
 <h4>add</h4>
 <pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true" title="class or interface in java.lang">@Deprecated</a>
-public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.97">add</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Delete.html" title="class in org.apache.hadoop.hbase.client">Delete</a>&nbsp;d)
+public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.98">add</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Delete.html" title="class in org.apache.hadoop.hbase.client">Delete</a>&nbsp;d)
                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="deprecatedLabel">Deprecated.</span>&nbsp;<span class="deprecationComment">since 2.0 version and will be removed in 3.0 version.
              use <a href="../../../../../org/apache/hadoop/hbase/client/RowMutations.html#add-org.apache.hadoop.hbase.client.Mutation-"><code>add(Mutation)</code></a></span></div>
@@ -417,7 +417,7 @@ public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/c
 <ul class="blockList">
 <li class="blockList">
 <h4>add</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/RowMutations.html" title="class in org.apache.hadoop.hbase.client">RowMutations</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.107">add</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Mutation.html" title="class in org.apache.hadoop.hbase.client">Mutation</a>&nbsp;mutation)
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/RowMutations.html" title="class in org.apache.hadoop.hbase.client">RowMutations</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.108">add</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Mutation.html" title="class in org.apache.hadoop.hbase.client">Mutation</a>&nbsp;mutation)
                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Currently only supports <a href="../../../../../org/apache/hadoop/hbase/client/Put.html" title="class in org.apache.hadoop.hbase.client"><code>Put</code></a> and <a href="../../../../../org/apache/hadoop/hbase/client/Delete.html" title="class in org.apache.hadoop.hbase.client"><code>Delete</code></a> mutations.</div>
 <dl>
@@ -434,7 +434,7 @@ public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/c
 <ul class="blockList">
 <li class="blockList">
 <h4>add</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/RowMutations.html" title="class in org.apache.hadoop.hbase.client">RowMutations</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.117">add</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;? extends <a href="../../../../../org/apache/hadoop/hbase/client/Mutation.html" title="class in org.apache.hadoop.hbase.client">Mutation</a>&gt;&nbsp;mutations)
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/RowMutations.html" title="class in org.apache.hadoop.hbase.client">RowMutations</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.118">add</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;? extends <a href="../../../../../org/apache/hadoop/hbase/client/Mutation.html" title="class in org.apache.hadoop.hbase.client">Mutation</a>&gt;&nbsp;mutations)
                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Currently only supports <a href="../../../../../org/apache/hadoop/hbase/client/Put.html" title="class in org.apache.hadoop.hbase.client"><code>Put</code></a> and <a href="../../../../../org/apache/hadoop/hbase/client/Delete.html" title="class in org.apache.hadoop.hbase.client"><code>Delete</code></a> mutations.</div>
 <dl>
@@ -452,7 +452,7 @@ public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/c
 <li class="blockList">
 <h4>compareTo</h4>
 <pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true" title="class or interface in java.lang">@Deprecated</a>
-public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.135">compareTo</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Row.html" title="interface in org.apache.hadoop.hbase.client">Row</a>&nbsp;i)</pre>
+public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.136">compareTo</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Row.html" title="interface in org.apache.hadoop.hbase.client">Row</a>&nbsp;i)</pre>
 <div class="block"><span class="deprecatedLabel">Deprecated.</span>&nbsp;<span class="deprecationComment">As of release 2.0.0, this will be removed in HBase 3.0.0.
              Use <a href="../../../../../org/apache/hadoop/hbase/client/Row.html#COMPARATOR"><code>Row.COMPARATOR</code></a> instead</span></div>
 <dl>
@@ -470,7 +470,7 @@ public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/cl
 <li class="blockList">
 <h4>equals</h4>
 <pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true" title="class or interface in java.lang">@Deprecated</a>
-public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.145">equals</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;obj)</pre>
+public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.146">equals</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;obj)</pre>
 <div class="block"><span class="deprecatedLabel">Deprecated.</span>&nbsp;<span class="deprecationComment">As of release 2.0.0, this will be removed in HBase 3.0.0.
              No replacement</span></div>
 <dl>
@@ -486,7 +486,7 @@ public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbas
 <li class="blockList">
 <h4>hashCode</h4>
 <pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true" title="class or interface in java.lang">@Deprecated</a>
-public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.160">hashCode</a>()</pre>
+public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.161">hashCode</a>()</pre>
 <div class="block"><span class="deprecatedLabel">Deprecated.</span>&nbsp;<span class="deprecationComment">As of release 2.0.0, this will be removed in HBase 3.0.0.
              No replacement</span></div>
 <dl>
@@ -501,7 +501,7 @@ public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/cl
 <ul class="blockList">
 <li class="blockList">
 <h4>getRow</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.165">getRow</a>()</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.166">getRow</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/client/Row.html#getRow--">getRow</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/client/Row.html" title="interface in org.apache.hadoop.hbase.client">Row</a></code></dd>
@@ -516,7 +516,7 @@ public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/cl
 <ul class="blockList">
 <li class="blockList">
 <h4>getMutations</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/Mutation.html" title="class in org.apache.hadoop.hbase.client">Mutation</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.172">getMutations</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/Mutation.html" title="class in org.apache.hadoop.hbase.client">Mutation</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.173">getMutations</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>An unmodifiable list of the current mutations.</dd>
@@ -529,7 +529,7 @@ public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/cl
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getMaxPriority</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.176">getMaxPriority</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.177">getMaxPriority</a>()</pre>
 </li>
 </ul>
 </li>
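
A brief usage note on the RowMutations API whose source anchors moved above: the class collects Put and Delete operations against a single row, and Table.mutateRow applies the whole batch atomically. A minimal sketch, not part of this commit; the table name "demo", the column family, qualifiers, and values are made up for illustration.

    import java.io.IOException;
    import java.util.Arrays;

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.client.Delete;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.client.RowMutations;
    import org.apache.hadoop.hbase.client.Table;
    import org.apache.hadoop.hbase.util.Bytes;

    public class RowMutationsExample {
      public static void main(String[] args) throws IOException {
        byte[] row = Bytes.toBytes("row-1");
        byte[] cf = Bytes.toBytes("cf"); // hypothetical column family
        try (Connection conn = ConnectionFactory.createConnection();
             Table table = conn.getTable(TableName.valueOf("demo"))) {
          // Build the batch with the of(List) factory shown in the diff above;
          // it checks that every mutation targets the same row.
          RowMutations rm = RowMutations.of(Arrays.asList(
              new Put(row).addColumn(cf, Bytes.toBytes("q1"), Bytes.toBytes("v1")),
              new Delete(row).addColumns(cf, Bytes.toBytes("q2"))));
          // The Put and the Delete are applied atomically on the single row.
          table.mutateRow(rm);
        }
      }
    }

Because of(List) takes the row key from the first mutation and rejects any mutation for a different row, both operations above reuse the same row byte array.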

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
index ad8d01b..e6e17f6 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
@@ -550,23 +550,23 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RegionLocateType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RegionLocateType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">SnapshotType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/ScannerCallable.MoreResults.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">ScannerCallable.MoreResults</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/MobCompactPartitionPolicy.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">MobCompactPartitionPolicy</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncProcessTask.SubmittedRows.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncProcessTask.SubmittedRows</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/IsolationLevel.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">IsolationLevel</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Durability.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Durability</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/CompactionState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">CompactionState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RegionLocateType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RegionLocateType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/TableState.State.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">TableState.State</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RequestController.ReturnCode.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RequestController.ReturnCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncProcessTask.SubmittedRows.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncProcessTask.SubmittedRows</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Consistency.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Consistency</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Scan.ReadType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Scan.ReadType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/CompactType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">CompactType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.Retry.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncRequestFutureImpl.Retry</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RequestController.ReturnCode.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RequestController.ReturnCode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/ScannerCallable.MoreResults.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">ScannerCallable.MoreResults</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">SnapshotType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/CompactionState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">CompactionState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AbstractResponse.ResponseType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AbstractResponse.ResponseType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Consistency.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Consistency</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.Retry.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncRequestFutureImpl.Retry</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Scan.ReadType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Scan.ReadType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/MasterSwitchType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">MasterSwitchType</span></a></li>
 </ul>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/org/apache/hadoop/hbase/executor/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/executor/package-tree.html b/devapidocs/org/apache/hadoop/hbase/executor/package-tree.html
index ce950b0..ae0124e 100644
--- a/devapidocs/org/apache/hadoop/hbase/executor/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/executor/package-tree.html
@@ -104,8 +104,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.executor.<a href="../../../../../org/apache/hadoop/hbase/executor/EventType.html" title="enum in org.apache.hadoop.hbase.executor"><span class="typeNameLink">EventType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.executor.<a href="../../../../../org/apache/hadoop/hbase/executor/ExecutorType.html" title="enum in org.apache.hadoop.hbase.executor"><span class="typeNameLink">ExecutorType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.executor.<a href="../../../../../org/apache/hadoop/hbase/executor/EventType.html" title="enum in org.apache.hadoop.hbase.executor"><span class="typeNameLink">EventType</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
index ff7405b..f93eaa3 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
@@ -183,14 +183,14 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/Filter.ReturnCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">Filter.ReturnCode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">CompareFilter.CompareOp</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FilterList.Operator.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FilterList.Operator</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/BitComparator.BitwiseOp.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">BitComparator.BitwiseOp</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.SatisfiesCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FuzzyRowFilter.SatisfiesCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/Filter.ReturnCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">Filter.ReturnCode</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/RegexStringComparator.EngineType.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">RegexStringComparator.EngineType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FilterWrapper.FilterRowRetCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FilterWrapper.FilterRowRetCode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FilterList.Operator.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FilterList.Operator</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.Order.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FuzzyRowFilter.Order</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/BitComparator.BitwiseOp.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">BitComparator.BitwiseOp</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FilterWrapper.FilterRowRetCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FilterWrapper.FilterRowRetCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">CompareFilter.CompareOp</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
index 54d2030..09bf819 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
@@ -273,12 +273,12 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">HFileBlock.Writer.State</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">CacheConfig.ExternalBlockCaches</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockPriority.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockPriority</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.MemoryType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">Cacheable.MemoryType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">HFileBlock.Writer.State</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.BlockCategory.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockType.BlockCategory</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.MemoryType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">Cacheable.MemoryType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">CacheConfig.ExternalBlockCaches</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html b/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
index 7600159..a6039ad 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
@@ -350,8 +350,8 @@
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
 <li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceFactoryImpl.SourceStorage.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">MetricsHBaseServerSourceFactoryImpl.SourceStorage</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/CallEvent.Type.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">CallEvent.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/BufferCallBeforeInitHandler.BufferCallAction.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">BufferCallBeforeInitHandler.BufferCallAction</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/CallEvent.Type.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">CallEvent.Type</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html b/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
index a07a1d8..6ec2fb5 100644
--- a/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
@@ -293,10 +293,10 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
+<li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/RowCounter.RowCounterMapper.Counters.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">RowCounter.RowCounterMapper.Counters</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/CellCounter.CellCounterMapper.Counters.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">CellCounter.CellCounterMapper.Counters</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/TableSplit.Version.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">TableSplit.Version</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/SyncTable.SyncMapper.Counter.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">SyncTable.SyncMapper.Counter</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/RowCounter.RowCounterMapper.Counters.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">RowCounter.RowCounterMapper.Counters</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/TableSplit.Version.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">TableSplit.Version</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/org/apache/hadoop/hbase/master/balancer/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/master/balancer/package-tree.html b/devapidocs/org/apache/hadoop/hbase/master/balancer/package-tree.html
index ad40e69..eb945fb 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/balancer/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/balancer/package-tree.html
@@ -197,8 +197,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.master.balancer.<a href="../../../../../../org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.LocalityType.html" title="enum in org.apache.hadoop.hbase.master.balancer"><span class="typeNameLink">BaseLoadBalancer.Cluster.LocalityType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.balancer.<a href="../../../../../../org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.Action.Type.html" title="enum in org.apache.hadoop.hbase.master.balancer"><span class="typeNameLink">BaseLoadBalancer.Cluster.Action.Type</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.balancer.<a href="../../../../../../org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.LocalityType.html" title="enum in org.apache.hadoop.hbase.master.balancer"><span class="typeNameLink">BaseLoadBalancer.Cluster.LocalityType</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/master/package-tree.html b/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
index 79c34d2..0185fb6 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
@@ -347,10 +347,10 @@
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
 <li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MetricsMasterSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">MetricsMasterSourceFactoryImpl.FactoryStorage</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">SplitLogManager.TerminationStatus</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/RegionState.State.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">RegionState.State</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">MasterRpcServices.BalanceSwitchMode</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">SplitLogManager.ResubmitDirective</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">SplitLogManager.TerminationStatus</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">MasterRpcServices.BalanceSwitchMode</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html b/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
index 2d4a29c..0fa081d 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
@@ -208,9 +208,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/TableProcedureInterface.TableOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">TableProcedureInterface.TableOperationType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/PeerProcedureInterface.PeerOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">PeerProcedureInterface.PeerOperationType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/ServerProcedureInterface.ServerOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">ServerProcedureInterface.ServerOperationType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/PeerProcedureInterface.PeerOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">PeerProcedureInterface.PeerOperationType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/TableProcedureInterface.TableOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">TableProcedureInterface.TableOperationType</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.ClientZkUpdater.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.ClientZkUpdater.html b/devapidocs/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.ClientZkUpdater.html
index 5615a3c..0b0f483 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.ClientZkUpdater.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.ClientZkUpdater.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>class <a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html#line.214">ClientZKSyncer.ClientZkUpdater</a>
+<pre>class <a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html#line.215">ClientZKSyncer.ClientZkUpdater</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html?is-external=true" title="class or interface in java.lang">Thread</a></pre>
 <div class="block">Thread to synchronize znode data to client ZK cluster</div>
 </li>
@@ -245,7 +245,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html
 <ul class="blockList">
 <li class="blockList">
 <h4>znode</h4>
-<pre>final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.ClientZkUpdater.html#line.215">znode</a></pre>
+<pre>final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.ClientZkUpdater.html#line.216">znode</a></pre>
 </li>
 </ul>
 <a name="queue">
@@ -254,7 +254,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>queue</h4>
-<pre>final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/BlockingQueue.html?is-external=true" title="class or interface in java.util.concurrent">BlockingQueue</a>&lt;byte[]&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.ClientZkUpdater.html#line.216">queue</a></pre>
+<pre>final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/BlockingQueue.html?is-external=true" title="class or interface in java.util.concurrent">BlockingQueue</a>&lt;byte[]&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.ClientZkUpdater.html#line.217">queue</a></pre>
 </li>
 </ul>
 </li>
@@ -271,7 +271,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>ClientZkUpdater</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.ClientZkUpdater.html#line.218">ClientZkUpdater</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode,
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.ClientZkUpdater.html#line.219">ClientZkUpdater</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;znode,
                        <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/BlockingQueue.html?is-external=true" title="class or interface in java.util.concurrent">BlockingQueue</a>&lt;byte[]&gt;&nbsp;queue)</pre>
 </li>
 </ul>
@@ -289,7 +289,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>run</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.ClientZkUpdater.html#line.225">run</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.ClientZkUpdater.html#line.226">run</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true#run--" title="class or interface in java.lang">run</a></code>&nbsp;in interface&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true" title="class or interface in java.lang">Runnable</a></code></dd>
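
The ClientZkUpdater hunks above document a per-znode worker thread: it blocks on a
BlockingQueue<byte[]> and pushes each dequeued update to the client ZK cluster. A
minimal sketch of that pattern, with a hypothetical syncTo() standing in for the real
setDataForClientZkUntilSuccess retry loop:

    import java.util.concurrent.BlockingQueue;

    class ZkUpdaterSketch extends Thread {
      private final String znode;
      private final BlockingQueue<byte[]> queue;

      ZkUpdaterSketch(String znode, BlockingQueue<byte[]> queue) {
        this.znode = znode;
        this.queue = queue;
      }

      @Override
      public void run() {
        try {
          while (!isInterrupted()) {
            byte[] data = queue.take(); // block until new znode data arrives
            syncTo(znode, data);        // hypothetical stand-in for the retry logic
          }
        } catch (InterruptedException e) {
          Thread.currentThread().interrupt(); // restore flag and exit on shutdown
        }
      }

      // Hypothetical sink; the real class retries setData on client ZK until it succeeds.
      private void syncTo(String znode, byte[] data) {
      }
    }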


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html b/devapidocs/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html
index 9c27524..9f8cca5 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html
@@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab";
 <hr>
 <br>
 <pre>@InterfaceAudience.Private
-public abstract class <a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html#line.47">ClientZKSyncer</a>
+public abstract class <a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html#line.48">ClientZKSyncer</a>
 extends <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKListener.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKListener</a></pre>
 <div class="block">Tracks the target znode(s) on server ZK cluster and synchronize them to client ZK cluster if
  changed
@@ -169,7 +169,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKListener.
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html#clientZkWatcher">clientZkWatcher</a></span></code>&nbsp;</td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code>private static org.apache.commons.logging.Log</code></td>
+<td class="colFirst"><code>private static org.slf4j.Logger</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html#LOG">LOG</a></span></code>&nbsp;</td>
 </tr>
 <tr class="altColor">
@@ -312,7 +312,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKListener.
 <ul class="blockList">
 <li class="blockList">
 <h4>LOG</h4>
-<pre>private static final&nbsp;org.apache.commons.logging.Log <a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html#line.48">LOG</a></pre>
+<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html#line.49">LOG</a></pre>
 </li>
 </ul>
 <a name="server">
@@ -321,7 +321,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKListener.
 <ul class="blockList">
 <li class="blockList">
 <h4>server</h4>
-<pre>private final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/Server.html" title="interface in org.apache.hadoop.hbase">Server</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html#line.49">server</a></pre>
+<pre>private final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/Server.html" title="interface in org.apache.hadoop.hbase">Server</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html#line.50">server</a></pre>
 </li>
 </ul>
 <a name="clientZkWatcher">
@@ -330,7 +330,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKListener.
 <ul class="blockList">
 <li class="blockList">
 <h4>clientZkWatcher</h4>
-<pre>private final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html#line.50">clientZkWatcher</a></pre>
+<pre>private final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html#line.51">clientZkWatcher</a></pre>
 </li>
 </ul>
 <a name="queues">
@@ -339,7 +339,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKListener.
 <ul class="blockListLast">
 <li class="blockList">
 <h4>queues</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/BlockingQueue.html?is-external=true" title="class or interface in java.util.concurrent">BlockingQueue</a>&lt;byte[]&gt;&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html#line.53">queues</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/BlockingQueue.html?is-external=true" title="class or interface in java.util.concurrent">BlockingQueue</a>&lt;byte[]&gt;&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html#line.54">queues</a></pre>
 </li>
 </ul>
 </li>
@@ -356,7 +356,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKListener.
 <ul class="blockListLast">
 <li class="blockList">
 <h4>ClientZKSyncer</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html#line.55">ClientZKSyncer</a>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;watcher,
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html#line.56">ClientZKSyncer</a>(<a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;watcher,
                       <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKWatcher.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKWatcher</a>&nbsp;clientZkWatcher,
                       <a href="../../../../../../org/apache/hadoop/hbase/Server.html" title="interface in org.apache.hadoop.hbase">Server</a>&nbsp;server)</pre>
 </li>
@@ -375,7 +375,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKListener.
 <ul class="blockList">
 <li class="blockList">
 <h4>start</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html#line.66">start</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html#line.67">start</a>()
            throws org.apache.zookeeper.KeeperException</pre>
 <div class="block">Starts the syncer</div>
 <dl>
@@ -390,7 +390,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKListener.
 <ul class="blockList">
 <li class="blockList">
 <h4>watchAndCheckExists</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html#line.85">watchAndCheckExists</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;node)</pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html#line.86">watchAndCheckExists</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;node)</pre>
 </li>
 </ul>
 <a name="upsertQueue-java.lang.String-byte:A-">
@@ -399,7 +399,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKListener.
 <ul class="blockList">
 <li class="blockList">
 <h4>upsertQueue</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html#line.113">upsertQueue</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;node,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html#line.114">upsertQueue</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;node,
                          byte[]&nbsp;data)</pre>
 <div class="block">Update the value of the single element in queue if any, or else insert.
  <p/>
@@ -417,7 +417,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKListener.
 <ul class="blockList">
 <li class="blockList">
 <h4>setDataForClientZkUntilSuccess</h4>
-<pre>private final&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html#line.128">setDataForClientZkUntilSuccess</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;node,
+<pre>private final&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html#line.129">setDataForClientZkUntilSuccess</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;node,
                                                   byte[]&nbsp;data)
                                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
 <div class="block">Set data for client ZK and retry until succeed. Be very careful to prevent dead loop when
@@ -437,7 +437,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKListener.
 <ul class="blockList">
 <li class="blockList">
 <h4>reconnectAfterExpiration</h4>
-<pre>private final&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html#line.157">reconnectAfterExpiration</a>()
+<pre>private final&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html#line.158">reconnectAfterExpiration</a>()
                                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -451,7 +451,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKListener.
 <ul class="blockList">
 <li class="blockList">
 <h4>nodeCreated</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html#line.167">nodeCreated</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;path)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html#line.168">nodeCreated</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;path)</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKListener.html#nodeCreated-java.lang.String-">ZKListener</a></code></span></div>
 <div class="block">Called when a new node has been created.</div>
 <dl>
@@ -468,7 +468,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKListener.
 <ul class="blockList">
 <li class="blockList">
 <h4>nodeDataChanged</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html#line.180">nodeDataChanged</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;path)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html#line.181">nodeDataChanged</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;path)</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKListener.html#nodeDataChanged-java.lang.String-">ZKListener</a></code></span></div>
 <div class="block">Called when an existing node has changed data.</div>
 <dl>
@@ -485,7 +485,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKListener.
 <ul class="blockList">
 <li class="blockList">
 <h4>nodeDeleted</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html#line.187">nodeDeleted</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;path)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html#line.188">nodeDeleted</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;path)</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKListener.html#nodeDeleted-java.lang.String-">ZKListener</a></code></span></div>
 <div class="block">Called when a node has been deleted</div>
 <dl>
@@ -502,7 +502,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKListener.
 <ul class="blockList">
 <li class="blockList">
 <h4>validate</h4>
-<pre>abstract&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html#line.204">validate</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;path)</pre>
+<pre>abstract&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html#line.205">validate</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;path)</pre>
 <div class="block">Validate whether a znode path is watched by us</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -518,7 +518,7 @@ extends <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZKListener.
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getNodesToWatch</h4>
-<pre>abstract&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html#line.209">getNodesToWatch</a>()</pre>
+<pre>abstract&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html#line.210">getNodesToWatch</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>the znode(s) to watch</dd>
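
Aside from the one-line source shifts, the substantive change in ClientZKSyncer.html is
the LOG field moving from org.apache.commons.logging.Log to org.slf4j.Logger. In source
terms the migration is a small swap; a sketch (field only, class body elided):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    abstract class ClientZKSyncerSketch {
      // before: private static final Log LOG = LogFactory.getLog(ClientZKSyncer.class);
      private static final Logger LOG = LoggerFactory.getLogger(ClientZKSyncerSketch.class);
    }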

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/org/apache/hadoop/hbase/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/package-tree.html b/devapidocs/org/apache/hadoop/hbase/package-tree.html
index 8f95e1e..3455858 100644
--- a/devapidocs/org/apache/hadoop/hbase/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/package-tree.html
@@ -441,19 +441,19 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/MetaTableAccessor.QueryType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">MetaTableAccessor.QueryType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CompatibilitySingletonFactory.SingletonStorage.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CompatibilitySingletonFactory.SingletonStorage</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CompareOperator.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CompareOperator</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/MemoryCompactionPolicy.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">MemoryCompactionPolicy</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HConstants.OperationStatusCode.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HConstants.OperationStatusCode</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CellBuilderType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CellBuilderType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/ClusterMetrics.Option.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">ClusterMetrics.Option</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Coprocessor.State.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Coprocessor.State</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeepDeletedCells.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">KeepDeletedCells</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Size.Unit.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Size.Unit</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HealthChecker.HealthCheckerExitStatus</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CompareOperator.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CompareOperator</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CompatibilitySingletonFactory.SingletonStorage.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CompatibilitySingletonFactory.SingletonStorage</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeepDeletedCells.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">KeepDeletedCells</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/ClusterMetrics.Option.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">ClusterMetrics.Option</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/MemoryCompactionPolicy.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">MemoryCompactionPolicy</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Cell.Type.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Cell.Type</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Coprocessor.State.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Coprocessor.State</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeyValue.Type.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">KeyValue.Type</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HealthChecker.HealthCheckerExitStatus</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/MetaTableAccessor.QueryType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">MetaTableAccessor.QueryType</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html b/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
index f58a1c3..31c8440 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
@@ -212,11 +212,11 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/RootProcedureState.State.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">RootProcedureState.State</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/StateMachineProcedure.Flow.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">StateMachineProcedure.Flow</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/LockType.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">LockType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/LockedResourceType.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">LockedResourceType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/Procedure.LockState.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">Procedure.LockState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/LockType.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">LockType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/RootProcedureState.State.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">RootProcedureState.State</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.QuotaSnapshotSizeSerializationException.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.QuotaSnapshotSizeSerializationException.html b/devapidocs/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.QuotaSnapshotSizeSerializationException.html
index 4082b4f..0183adf 100644
--- a/devapidocs/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.QuotaSnapshotSizeSerializationException.html
+++ b/devapidocs/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.QuotaSnapshotSizeSerializationException.html
@@ -127,7 +127,7 @@
 <hr>
 <br>
 <pre>@InterfaceAudience.Private
-public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.87">FileArchiverNotifierImpl.QuotaSnapshotSizeSerializationException</a>
+public static class <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.88">FileArchiverNotifierImpl.QuotaSnapshotSizeSerializationException</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">An Exception thrown when SnapshotSize updates to hbase:quota fail to be written.</div>
 <dl>
@@ -216,7 +216,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.h
 <ul class="blockListLast">
 <li class="blockList">
 <h4>serialVersionUID</h4>
-<pre>private static final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.QuotaSnapshotSizeSerializationException.html#line.88">serialVersionUID</a></pre>
+<pre>private static final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.QuotaSnapshotSizeSerializationException.html#line.89">serialVersionUID</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.quotas.FileArchiverNotifierImpl.QuotaSnapshotSizeSerializationException.serialVersionUID">Constant Field Values</a></dd>
@@ -237,7 +237,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.h
 <ul class="blockListLast">
 <li class="blockList">
 <h4>QuotaSnapshotSizeSerializationException</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.QuotaSnapshotSizeSerializationException.html#line.90">QuotaSnapshotSizeSerializationException</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;msg)</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.QuotaSnapshotSizeSerializationException.html#line.91">QuotaSnapshotSizeSerializationException</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;msg)</pre>
 </li>
 </ul>
 </li>
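
QuotaSnapshotSizeSerializationException, per the members above, is a plain IOException
subclass signalling that a SnapshotSize update could not be written to hbase:quota. A
sketch consistent with those signatures (the real serialVersionUID value is not shown
in this diff, so a placeholder is used):

    import java.io.IOException;

    class QuotaSnapshotSizeSerializationExceptionSketch extends IOException {
      private static final long serialVersionUID = 1L; // placeholder value

      QuotaSnapshotSizeSerializationExceptionSketch(String msg) {
        super(msg);
      }
    }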

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.SnapshotWithSize.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.SnapshotWithSize.html b/devapidocs/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.SnapshotWithSize.html
index 10da3a8..cd4153a 100644
--- a/devapidocs/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.SnapshotWithSize.html
+++ b/devapidocs/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.SnapshotWithSize.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.542">FileArchiverNotifierImpl.SnapshotWithSize</a>
+<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.543">FileArchiverNotifierImpl.SnapshotWithSize</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 <div class="block">A struct encapsulating the name of a snapshot and its "size" on the filesystem. This size is
  defined as the amount of filesystem space taken by the files the snapshot refers to which
@@ -225,7 +225,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>name</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.SnapshotWithSize.html#line.543">name</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.SnapshotWithSize.html#line.544">name</a></pre>
 </li>
 </ul>
 <a name="size">
@@ -234,7 +234,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>size</h4>
-<pre>private final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.SnapshotWithSize.html#line.544">size</a></pre>
+<pre>private final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.SnapshotWithSize.html#line.545">size</a></pre>
 </li>
 </ul>
 </li>
@@ -251,7 +251,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>SnapshotWithSize</h4>
-<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.SnapshotWithSize.html#line.546">SnapshotWithSize</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name,
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.SnapshotWithSize.html#line.547">SnapshotWithSize</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name,
                  long&nbsp;size)</pre>
 </li>
 </ul>
@@ -269,7 +269,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getName</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.SnapshotWithSize.html#line.551">getName</a>()</pre>
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.SnapshotWithSize.html#line.552">getName</a>()</pre>
 </li>
 </ul>
 <a name="getSize--">
@@ -278,7 +278,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getSize</h4>
-<pre>long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.SnapshotWithSize.html#line.555">getSize</a>()</pre>
+<pre>long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.SnapshotWithSize.html#line.556">getSize</a>()</pre>
 </li>
 </ul>
 <a name="hashCode--">
@@ -287,7 +287,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>hashCode</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.SnapshotWithSize.html#line.560">hashCode</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.SnapshotWithSize.html#line.561">hashCode</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#hashCode--" title="class or interface in java.lang">hashCode</a></code>&nbsp;in class&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>
@@ -300,7 +300,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>equals</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.SnapshotWithSize.html#line.565">equals</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;o)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.SnapshotWithSize.html#line.566">equals</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;o)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-" title="class or interface in java.lang">equals</a></code>&nbsp;in class&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>
@@ -313,7 +313,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>toString</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.SnapshotWithSize.html#line.579">toString</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.SnapshotWithSize.html#line.580">toString</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--" title="class or interface in java.lang">toString</a></code>&nbsp;in class&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>
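
SnapshotWithSize, per the signatures above, is an immutable (name, size) pair with
value-style hashCode/equals/toString. A self-contained sketch consistent with those
members (the real implementations may differ in detail):

    import java.util.Objects;

    final class SnapshotWithSizeSketch {
      private final String name;
      private final long size;

      SnapshotWithSizeSketch(String name, long size) {
        this.name = name;
        this.size = size;
      }

      String getName() { return name; }
      long getSize() { return size; }

      @Override public int hashCode() { return Objects.hash(name, size); }

      @Override public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof SnapshotWithSizeSketch)) return false;
        SnapshotWithSizeSketch other = (SnapshotWithSizeSketch) o;
        return size == other.size && Objects.equals(name, other.name);
      }

      @Override public String toString() {
        return "SnapshotWithSize[name=" + name + ", size=" + size + "]";
      }
    }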

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.StoreFileReference.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.StoreFileReference.html b/devapidocs/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.StoreFileReference.html
index ae8242e..3685f5d 100644
--- a/devapidocs/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.StoreFileReference.html
+++ b/devapidocs/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.StoreFileReference.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.589">FileArchiverNotifierImpl.StoreFileReference</a>
+<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.590">FileArchiverNotifierImpl.StoreFileReference</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 <div class="block">A reference to a collection of files in the archive directory for a single region.</div>
 </li>
@@ -227,7 +227,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>regionName</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.StoreFileReference.html#line.590">regionName</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.StoreFileReference.html#line.591">regionName</a></pre>
 </li>
 </ul>
 <a name="familyToFiles">
@@ -236,7 +236,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>familyToFiles</h4>
-<pre>private final&nbsp;org.apache.hbase.thirdparty.com.google.common.collect.Multimap&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.StoreFileReference.html#line.591">familyToFiles</a></pre>
+<pre>private final&nbsp;org.apache.hbase.thirdparty.com.google.common.collect.Multimap&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.StoreFileReference.html#line.592">familyToFiles</a></pre>
 </li>
 </ul>
 </li>
@@ -253,7 +253,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>StoreFileReference</h4>
-<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.StoreFileReference.html#line.593">StoreFileReference</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;regionName)</pre>
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.StoreFileReference.html#line.594">StoreFileReference</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;regionName)</pre>
 </li>
 </ul>
 </li>
@@ -270,7 +270,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getRegionName</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.StoreFileReference.html#line.598">getRegionName</a>()</pre>
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.StoreFileReference.html#line.599">getRegionName</a>()</pre>
 </li>
 </ul>
 <a name="getFamilyToFilesMapping--">
@@ -279,7 +279,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getFamilyToFilesMapping</h4>
-<pre>org.apache.hbase.thirdparty.com.google.common.collect.Multimap&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.StoreFileReference.html#line.602">getFamilyToFilesMapping</a>()</pre>
+<pre>org.apache.hbase.thirdparty.com.google.common.collect.Multimap&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.StoreFileReference.html#line.603">getFamilyToFilesMapping</a>()</pre>
 </li>
 </ul>
 <a name="addFamilyStoreFile-java.lang.String-java.lang.String-">
@@ -288,7 +288,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>addFamilyStoreFile</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.StoreFileReference.html#line.606">addFamilyStoreFile</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;family,
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.StoreFileReference.html#line.607">addFamilyStoreFile</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;family,
                         <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;storeFileName)</pre>
 </li>
 </ul>
@@ -298,7 +298,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>hashCode</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.StoreFileReference.html#line.611">hashCode</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.StoreFileReference.html#line.612">hashCode</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#hashCode--" title="class or interface in java.lang">hashCode</a></code>&nbsp;in class&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>
@@ -311,7 +311,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>equals</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.StoreFileReference.html#line.616">equals</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;o)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.StoreFileReference.html#line.617">equals</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;o)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-" title="class or interface in java.lang">equals</a></code>&nbsp;in class&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>
@@ -324,7 +324,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>toString</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.StoreFileReference.html#line.628">toString</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.StoreFileReference.html#line.629">toString</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--" title="class or interface in java.lang">toString</a></code>&nbsp;in class&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>
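
StoreFileReference groups archived store files by column family for a single region,
backed by the shaded Guava Multimap seen in the signatures above. A sketch using stock
Guava, with the value-object methods omitted for brevity:

    import com.google.common.collect.HashMultimap;
    import com.google.common.collect.Multimap;

    final class StoreFileReferenceSketch {
      private final String regionName;
      private final Multimap<String, String> familyToFiles = HashMultimap.create();

      StoreFileReferenceSketch(String regionName) {
        this.regionName = regionName;
      }

      String getRegionName() {
        return regionName;
      }

      Multimap<String, String> getFamilyToFilesMapping() {
        return familyToFiles;
      }

      void addFamilyStoreFile(String family, String storeFileName) {
        familyToFiles.put(family, storeFileName); // one family maps to many files
      }
    }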

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html b/devapidocs/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html
index 15d4451..86ba3f0 100644
--- a/devapidocs/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 <hr>
 <br>
 <pre>@InterfaceAudience.Private
-public class <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.71">FileArchiverNotifierImpl</a>
+public class <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.72">FileArchiverNotifierImpl</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNotifier.html" title="interface in org.apache.hadoop.hbase.quotas">FileArchiverNotifier</a></pre>
 <div class="block">Tracks file archiving and updates the hbase quota table.</div>
@@ -190,7 +190,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNo
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#lastFullCompute">lastFullCompute</a></span></code>&nbsp;</td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code>private static org.apache.commons.logging.Log</code></td>
+<td class="colFirst"><code>private static org.slf4j.Logger</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#LOG">LOG</a></span></code>&nbsp;</td>
 </tr>
 <tr class="altColor">
@@ -379,7 +379,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNo
 <ul class="blockList">
 <li class="blockList">
 <h4>LOG</h4>
-<pre>private static final&nbsp;org.apache.commons.logging.Log <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.72">LOG</a></pre>
+<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.73">LOG</a></pre>
 </li>
 </ul>
 <a name="conn">
@@ -388,7 +388,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNo
 <ul class="blockList">
 <li class="blockList">
 <h4>conn</h4>
-<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.73">conn</a></pre>
+<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.74">conn</a></pre>
 </li>
 </ul>
 <a name="conf">
@@ -397,7 +397,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNo
 <ul class="blockList">
 <li class="blockList">
 <h4>conf</h4>
-<pre>private final&nbsp;org.apache.hadoop.conf.Configuration <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.74">conf</a></pre>
+<pre>private final&nbsp;org.apache.hadoop.conf.Configuration <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.75">conf</a></pre>
 </li>
 </ul>
 <a name="fs">
@@ -406,7 +406,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNo
 <ul class="blockList">
 <li class="blockList">
 <h4>fs</h4>
-<pre>private final&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.75">fs</a></pre>
+<pre>private final&nbsp;org.apache.hadoop.fs.FileSystem <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.76">fs</a></pre>
 </li>
 </ul>
 <a name="tn">
@@ -415,7 +415,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNo
 <ul class="blockList">
 <li class="blockList">
 <h4>tn</h4>
-<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.76">tn</a></pre>
+<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.77">tn</a></pre>
 </li>
 </ul>
 <a name="readLock">
@@ -424,7 +424,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNo
 <ul class="blockList">
 <li class="blockList">
 <h4>readLock</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/locks/ReentrantReadWriteLock.ReadLock.html?is-external=true" title="class or interface in java.util.concurrent.locks">ReentrantReadWriteLock.ReadLock</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.77">readLock</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/locks/ReentrantReadWriteLock.ReadLock.html?is-external=true" title="class or interface in java.util.concurrent.locks">ReentrantReadWriteLock.ReadLock</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.78">readLock</a></pre>
 </li>
 </ul>
 <a name="writeLock">
@@ -433,7 +433,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNo
 <ul class="blockList">
 <li class="blockList">
 <h4>writeLock</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/locks/ReentrantReadWriteLock.WriteLock.html?is-external=true" title="class or interface in java.util.concurrent.locks">ReentrantReadWriteLock.WriteLock</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.78">writeLock</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/locks/ReentrantReadWriteLock.WriteLock.html?is-external=true" title="class or interface in java.util.concurrent.locks">ReentrantReadWriteLock.WriteLock</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.79">writeLock</a></pre>
 </li>
 </ul>
 <a name="lastFullCompute">
@@ -442,7 +442,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNo
 <ul class="blockList">
 <li class="blockList">
 <h4>lastFullCompute</h4>
-<pre>private volatile&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.79">lastFullCompute</a></pre>
+<pre>private volatile&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.80">lastFullCompute</a></pre>
 </li>
 </ul>
 <a name="currentSnapshots">
@@ -451,7 +451,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNo
 <ul class="blockList">
 <li class="blockList">
 <h4>currentSnapshots</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.80">currentSnapshots</a></pre>
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.81">currentSnapshots</a></pre>
 </li>
 </ul>
 <a name="NAMESPACE_LOCKS">
@@ -460,7 +460,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNo
 <ul class="blockListLast">
 <li class="blockList">
 <h4>NAMESPACE_LOCKS</h4>
-<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.81">NAMESPACE_LOCKS</a></pre>
+<pre>private static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.82">NAMESPACE_LOCKS</a></pre>
 </li>
 </ul>
 </li>
@@ -477,7 +477,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNo
 <ul class="blockListLast">
 <li class="blockList">
 <h4>FileArchiverNotifierImpl</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.95">FileArchiverNotifierImpl</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a>&nbsp;conn,
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.96">FileArchiverNotifierImpl</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a>&nbsp;conn,
                                 org.apache.hadoop.conf.Configuration&nbsp;conf,
                                 org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                 <a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tn)</pre>
@@ -497,7 +497,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNo
 <ul class="blockList">
 <li class="blockList">
 <h4>getLockForNamespace</h4>
-<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.106">getLockForNamespace</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;namespace)</pre>
+<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.107">getLockForNamespace</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;namespace)</pre>
 </li>
 </ul>
 <a name="getLastFullCompute--">
@@ -506,7 +506,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNo
 <ul class="blockList">
 <li class="blockList">
 <h4>getLastFullCompute</h4>
-<pre>long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.113">getLastFullCompute</a>()</pre>
+<pre>long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.114">getLastFullCompute</a>()</pre>
 <div class="block">Returns a strictly-increasing measure of time extracted by <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/System.html?is-external=true#nanoTime--" title="class or interface in java.lang"><code>System.nanoTime()</code></a>.</div>
 </li>
 </ul>
@@ -516,7 +516,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNo
 <ul class="blockList">
 <li class="blockList">
 <h4>addArchivedFiles</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.118">addArchivedFiles</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.Entry.html?is-external=true" title="class or interface in java.util">Map.Entry</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&gt;&nbsp;fileSizes)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.119">addArchivedFiles</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.Entry.html?is-external=true" title="class or interface in java.util">Map.Entry</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&gt;&nbsp;fileSizes)
                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNotifier.html#addArchivedFiles-java.util.Set-">FileArchiverNotifier</a></code></span></div>
 <div class="block">Records a file and its size in bytes being moved to the archive directory.</div>
@@ -536,7 +536,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNo
 <ul class="blockList">
 <li class="blockList">
 <h4>groupArchivedFiledBySnapshotAndRecordSize</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.158">groupArchivedFiledBySnapshotAndRecordSize</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;snapshots,
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.159">groupArchivedFiledBySnapshotAndRecordSize</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;snapshots,
                                                <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.Entry.html?is-external=true" title="class or interface in java.util">Map.Entry</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&gt;&nbsp;fileSizes)
                                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">For each file in the map, this updates the first snapshot (lexicographic snapshot name) that
@@ -556,7 +556,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNo
 <ul class="blockList">
 <li class="blockList">
 <h4>bucketFilesToSnapshot</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.194">bucketFilesToSnapshot</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;snapshotName,
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.195">bucketFilesToSnapshot</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;snapshotName,
                            <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&nbsp;filesToUpdate,
                            <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&nbsp;snapshotSizeChanges)
                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -579,7 +579,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNo
 <ul class="blockList">
 <li class="blockList">
 <h4>persistSnapshotSizeChanges</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.233">persistSnapshotSizeChanges</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&nbsp;snapshotSizeChanges)
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.234">persistSnapshotSizeChanges</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&gt;&nbsp;snapshotSizeChanges)
                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Reads the current size for each snapshot to update, generates a new update based on that value,
  and then writes the new update.</div>
@@ -597,7 +597,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNo
 <ul class="blockList">
 <li class="blockList">
 <h4>getPreviousNamespaceSnapshotSize</h4>
-<pre>long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.311">getPreviousNamespaceSnapshotSize</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Table.html" title="interface in org.apache.hadoop.hbase.client">Table</a>&nbsp;quotaTable,
+<pre>long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.312">getPreviousNamespaceSnapshotSize</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Table.html" title="interface in org.apache.hadoop.hbase.client">Table</a>&nbsp;quotaTable,
                                       <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;namespace)
                                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Fetches the current size of all snapshots in the given <code>namespace</code>.</div>
@@ -618,7 +618,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNo
 <ul class="blockList">
 <li class="blockList">
 <h4>getSnapshotSizeFromResult</h4>
-<pre>long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.324">getSnapshotSizeFromResult</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Result.html" title="class in org.apache.hadoop.hbase.client">Result</a>&nbsp;r)
+<pre>long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.325">getSnapshotSizeFromResult</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Result.html" title="class in org.apache.hadoop.hbase.client">Result</a>&nbsp;r)
                         throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException</pre>
 <div class="block">Extracts the size component from a serialized <a href="../../../../../org/apache/hadoop/hbase/quotas/SpaceQuotaSnapshot.html" title="class in org.apache.hadoop.hbase.quotas"><code>SpaceQuotaSnapshot</code></a> protobuf.</div>
 <dl>
@@ -637,7 +637,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNo
 <ul class="blockList">
 <li class="blockList">
 <h4>computeAndStoreSnapshotSizes</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.334">computeAndStoreSnapshotSizes</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;currentSnapshots)
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.335">computeAndStoreSnapshotSizes</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;currentSnapshots)
                                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNotifier.html#computeAndStoreSnapshotSizes-java.util.Collection-">FileArchiverNotifier</a></code></span></div>
 <div class="block">Computes the size of a table and all of its snapshots, recording new "full" sizes for each.</div>
@@ -659,7 +659,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNo
 <ul class="blockList">
 <li class="blockList">
 <h4>toString</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.366">toString</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.367">toString</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--" title="class or interface in java.lang">toString</a></code>&nbsp;in class&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>
@@ -672,7 +672,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNo
 <ul class="blockList">
 <li class="blockList">
 <h4>computeSnapshotSizes</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.SnapshotWithSize.html" title="class in org.apache.hadoop.hbase.quotas">FileArchiverNotifierImpl.SnapshotWithSize</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.380">computeSnapshotSizes</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;snapshots)
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.SnapshotWithSize.html" title="class in org.apache.hadoop.hbase.quotas">FileArchiverNotifierImpl.SnapshotWithSize</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.381">computeSnapshotSizes</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;snapshots)
                                                               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Computes the size of each snapshot against the table referenced by <code>this</code>.</div>
 <dl>
@@ -691,7 +691,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNo
 <ul class="blockList">
 <li class="blockList">
 <h4>getSizeOfStoreFiles</h4>
-<pre>long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.443">getSizeOfStoreFiles</a>(<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tn,
+<pre>long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.444">getSizeOfStoreFiles</a>(<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tn,
                          <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.StoreFileReference.html" title="class in org.apache.hadoop.hbase.quotas">FileArchiverNotifierImpl.StoreFileReference</a>&gt;&nbsp;storeFileNames)</pre>
 <div class="block">Computes the size of each store file in <code>storeFileNames</code></div>
 </li>
@@ -702,7 +702,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNo
 <ul class="blockList">
 <li class="blockList">
 <h4>getSizeOfStoreFile</h4>
-<pre>long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.451">getSizeOfStoreFile</a>(<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tn,
+<pre>long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.452">getSizeOfStoreFile</a>(<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tn,
                         <a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.StoreFileReference.html" title="class in org.apache.hadoop.hbase.quotas">FileArchiverNotifierImpl.StoreFileReference</a>&nbsp;storeFileName)</pre>
 <div class="block">Computes the size of the store files for a single region.</div>
 </li>
@@ -713,7 +713,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNo
 <ul class="blockList">
 <li class="blockList">
 <h4>getSizeOfStoreFile</h4>
-<pre>long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.463">getSizeOfStoreFile</a>(<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tn,
+<pre>long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.464">getSizeOfStoreFile</a>(<a href="../../../../../org/apache/hadoop/hbase/TableName.html" title="class in org.apache.hadoop.hbase">TableName</a>&nbsp;tn,
                         <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;regionName,
                         <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;family,
                         <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;storeFile)</pre>
@@ -727,7 +727,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNo
 <ul class="blockList">
 <li class="blockList">
 <h4>getStoreFilesFromSnapshot</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.StoreFileReference.html" title="class in org.apache.hadoop.hbase.quotas">FileArchiverNotifierImpl.StoreFileReference</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.495">getStoreFilesFromSnapshot</a>(<a href="../../../../../org/apache/hadoop/hbase/snapshot/SnapshotManifest.html" title="class in org.apache.hadoop.hbase.snapshot">SnapshotManifest</a>&nbsp;manifest,
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.StoreFileReference.html" title="class in org.apache.hadoop.hbase.quotas">FileArchiverNotifierImpl.StoreFileReference</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.496">getStoreFilesFromSnapshot</a>(<a href="../../../../../org/apache/hadoop/hbase/snapshot/SnapshotManifest.html" title="class in org.apache.hadoop.hbase.snapshot">SnapshotManifest</a>&nbsp;manifest,
                                                                            <a href="https://docs.oracle.com/javase/8/docs/api/java/util/function/Predicate.html?is-external=true" title="class or interface in java.util.function">Predicate</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;filter)</pre>
 <div class="block">Extracts the names of the store files referenced by this snapshot which satisfy the given
  predicate (the predicate returns <code>true</code>).</div>
@@ -739,7 +739,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNo
 <ul class="blockListLast">
 <li class="blockList">
 <h4>persistSnapshotSizes</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.527">persistSnapshotSizes</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Table.html" title="interface in org.apache.hadoop.hbase.client">Table</a>&nbsp;table,
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html#line.528">persistSnapshotSizes</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Table.html" title="interface in org.apache.hadoop.hbase.client">Table</a>&nbsp;table,
                           <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.SnapshotWithSize.html" title="class in org.apache.hadoop.hbase.quotas">FileArchiverNotifierImpl.SnapshotWithSize</a>&gt;&nbsp;snapshotSizes)
                    throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Writes the snapshot sizes to the provided <code>table</code>.</div>


[09/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerCloseCallBack.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerCloseCallBack.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerCloseCallBack.html
index 7d1dba6..11f9915 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerCloseCallBack.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerCloseCallBack.html
@@ -50,114 +50,114 @@
 <span class="sourceLineNo">042</span>import java.util.concurrent.atomic.AtomicBoolean;<a name="line.42"></a>
 <span class="sourceLineNo">043</span>import java.util.concurrent.atomic.AtomicLong;<a name="line.43"></a>
 <span class="sourceLineNo">044</span>import java.util.concurrent.atomic.LongAdder;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.commons.collections.CollectionUtils;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.commons.lang3.mutable.MutableObject;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.conf.Configuration;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.fs.Path;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.ByteBufferExtendedCell;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.CacheEvictionStats;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.CacheEvictionStatsBuilder;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.Cell;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.CellScannable;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.HBaseIOException;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.HConstants;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.MultiActionResultTooLarge;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.Server;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.ServerName;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.TableName;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.client.Append;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.client.ConnectionUtils;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.client.Get;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.client.Put;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.client.Result;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.client.Row;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.client.VersionInfoUtil;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.conf.ConfigurationObserver;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.exceptions.ScannerResetException;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler;<a name="line.92"></a>
-<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.ipc.HBaseRpcController;<a name="line.93"></a>
-<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.ipc.PriorityFunction;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.ipc.QosPriority;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.ipc.RpcCallContext;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.ipc.RpcCallback;<a name="line.97"></a>
-<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.98"></a>
-<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.ipc.RpcServerFactory;<a name="line.100"></a>
-<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.ipc.RpcServerInterface;<a name="line.101"></a>
-<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;<a name="line.102"></a>
-<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.ipc.ServerRpcController;<a name="line.103"></a>
-<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.log.HBaseMarkers;<a name="line.104"></a>
-<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.master.MasterRpcServices;<a name="line.105"></a>
-<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.net.Address;<a name="line.106"></a>
-<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.procedure2.RSProcedureCallable;<a name="line.107"></a>
-<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.quotas.ActivePolicyEnforcement;<a name="line.108"></a>
-<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.quotas.OperationQuota;<a name="line.109"></a>
-<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.quotas.QuotaUtil;<a name="line.110"></a>
-<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.quotas.RegionServerRpcQuotaManager;<a name="line.111"></a>
-<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.112"></a>
-<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot;<a name="line.113"></a>
-<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.quotas.SpaceViolationPolicyEnforcement;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.regionserver.HRegion.RegionScannerImpl;<a name="line.115"></a>
-<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.regionserver.Leases.Lease;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.regionserver.Leases.LeaseStillHeldException;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.regionserver.Region.Operation;<a name="line.118"></a>
-<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.119"></a>
-<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.120"></a>
-<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.regionserver.handler.OpenMetaHandler;<a name="line.121"></a>
-<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.regionserver.handler.OpenPriorityRegionHandler;<a name="line.122"></a>
-<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.regionserver.handler.OpenRegionHandler;<a name="line.123"></a>
-<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.security.Superusers;<a name="line.124"></a>
-<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.security.User;<a name="line.125"></a>
-<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.security.access.AccessChecker;<a name="line.126"></a>
-<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.security.access.Permission;<a name="line.127"></a>
-<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.128"></a>
-<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.util.DNS;<a name="line.129"></a>
-<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.130"></a>
-<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.131"></a>
-<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.132"></a>
-<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.util.Strings;<a name="line.133"></a>
-<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.135"></a>
-<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.slf4j.Logger;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.slf4j.LoggerFactory;<a name="line.141"></a>
-<span class="sourceLineNo">142</span><a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hbase.thirdparty.com.google.common.cache.Cache;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hbase.thirdparty.com.google.common.cache.CacheBuilder;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hbase.thirdparty.com.google.protobuf.Message;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.152"></a>
+<span class="sourceLineNo">045</span>import org.apache.commons.lang3.mutable.MutableObject;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.conf.Configuration;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.fs.Path;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.ByteBufferExtendedCell;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.CacheEvictionStats;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.CacheEvictionStatsBuilder;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.Cell;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.CellScannable;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.HBaseIOException;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.HConstants;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.MultiActionResultTooLarge;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.Server;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.ServerName;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.TableName;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.client.Append;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.client.ConnectionUtils;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.client.Get;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.client.Put;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.client.Result;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.client.Row;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.client.VersionInfoUtil;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.conf.ConfigurationObserver;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.exceptions.ScannerResetException;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.90"></a>
+<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler;<a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.ipc.HBaseRpcController;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.ipc.PriorityFunction;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.ipc.QosPriority;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.ipc.RpcCallContext;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.ipc.RpcCallback;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.97"></a>
+<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface;<a name="line.98"></a>
+<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.ipc.RpcServerFactory;<a name="line.99"></a>
+<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.ipc.RpcServerInterface;<a name="line.100"></a>
+<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;<a name="line.101"></a>
+<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.ipc.ServerRpcController;<a name="line.102"></a>
+<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.log.HBaseMarkers;<a name="line.103"></a>
+<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.master.MasterRpcServices;<a name="line.104"></a>
+<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.net.Address;<a name="line.105"></a>
+<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.procedure2.RSProcedureCallable;<a name="line.106"></a>
+<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.quotas.ActivePolicyEnforcement;<a name="line.107"></a>
+<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.quotas.OperationQuota;<a name="line.108"></a>
+<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.quotas.QuotaUtil;<a name="line.109"></a>
+<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.quotas.RegionServerRpcQuotaManager;<a name="line.110"></a>
+<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.111"></a>
+<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot;<a name="line.112"></a>
+<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.quotas.SpaceViolationPolicyEnforcement;<a name="line.113"></a>
+<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.regionserver.HRegion.RegionScannerImpl;<a name="line.114"></a>
+<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.regionserver.Leases.Lease;<a name="line.115"></a>
+<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.regionserver.Leases.LeaseStillHeldException;<a name="line.116"></a>
+<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.regionserver.Region.Operation;<a name="line.117"></a>
+<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.118"></a>
+<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.119"></a>
+<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.regionserver.handler.OpenMetaHandler;<a name="line.120"></a>
+<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.regionserver.handler.OpenPriorityRegionHandler;<a name="line.121"></a>
+<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.regionserver.handler.OpenRegionHandler;<a name="line.122"></a>
+<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.security.Superusers;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.security.User;<a name="line.124"></a>
+<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.security.access.AccessChecker;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.security.access.Permission;<a name="line.126"></a>
+<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.127"></a>
+<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.util.DNS;<a name="line.128"></a>
+<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.129"></a>
+<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.130"></a>
+<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.131"></a>
+<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.util.Strings;<a name="line.132"></a>
+<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.133"></a>
+<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.134"></a>
+<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.135"></a>
+<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.136"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.slf4j.Logger;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.slf4j.LoggerFactory;<a name="line.140"></a>
+<span class="sourceLineNo">141</span><a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hbase.thirdparty.com.google.common.cache.Cache;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hbase.thirdparty.com.google.common.cache.CacheBuilder;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.hbase.thirdparty.com.google.protobuf.Message;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.152"></a>
 <span class="sourceLineNo">153</span><a name="line.153"></a>
 <span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.154"></a>
 <span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;<a name="line.155"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerHolder.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerHolder.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerHolder.html
index 7d1dba6..11f9915 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerHolder.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerHolder.html
@@ -50,114 +50,114 @@
 <span class="sourceLineNo">042</span>import java.util.concurrent.atomic.AtomicBoolean;<a name="line.42"></a>
 <span class="sourceLineNo">043</span>import java.util.concurrent.atomic.AtomicLong;<a name="line.43"></a>
 <span class="sourceLineNo">044</span>import java.util.concurrent.atomic.LongAdder;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.commons.collections.CollectionUtils;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.commons.lang3.mutable.MutableObject;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.conf.Configuration;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.fs.Path;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.ByteBufferExtendedCell;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.CacheEvictionStats;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.CacheEvictionStatsBuilder;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.Cell;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.CellScannable;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.HBaseIOException;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.HConstants;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.MultiActionResultTooLarge;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.Server;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.ServerName;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.TableName;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.client.Append;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.client.ConnectionUtils;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.client.Get;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.client.Put;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.client.Result;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.client.Row;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.client.VersionInfoUtil;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.conf.ConfigurationObserver;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.exceptions.ScannerResetException;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler;<a name="line.92"></a>
-<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.ipc.HBaseRpcController;<a name="line.93"></a>
-<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.ipc.PriorityFunction;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.ipc.QosPriority;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.ipc.RpcCallContext;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.ipc.RpcCallback;<a name="line.97"></a>
-<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.98"></a>
-<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.ipc.RpcServerFactory;<a name="line.100"></a>
-<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.ipc.RpcServerInterface;<a name="line.101"></a>
-<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;<a name="line.102"></a>
-<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.ipc.ServerRpcController;<a name="line.103"></a>
-<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.log.HBaseMarkers;<a name="line.104"></a>
-<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.master.MasterRpcServices;<a name="line.105"></a>
-<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.net.Address;<a name="line.106"></a>
-<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.procedure2.RSProcedureCallable;<a name="line.107"></a>
-<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.quotas.ActivePolicyEnforcement;<a name="line.108"></a>
-<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.quotas.OperationQuota;<a name="line.109"></a>
-<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.quotas.QuotaUtil;<a name="line.110"></a>
-<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.quotas.RegionServerRpcQuotaManager;<a name="line.111"></a>
-<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.112"></a>
-<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot;<a name="line.113"></a>
-<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.quotas.SpaceViolationPolicyEnforcement;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.regionserver.HRegion.RegionScannerImpl;<a name="line.115"></a>
-<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.regionserver.Leases.Lease;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.regionserver.Leases.LeaseStillHeldException;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.regionserver.Region.Operation;<a name="line.118"></a>
-<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.119"></a>
-<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.120"></a>
-<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.regionserver.handler.OpenMetaHandler;<a name="line.121"></a>
-<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.regionserver.handler.OpenPriorityRegionHandler;<a name="line.122"></a>
-<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.regionserver.handler.OpenRegionHandler;<a name="line.123"></a>
-<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.security.Superusers;<a name="line.124"></a>
-<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.security.User;<a name="line.125"></a>
-<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.security.access.AccessChecker;<a name="line.126"></a>
-<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.security.access.Permission;<a name="line.127"></a>
-<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.128"></a>
-<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.util.DNS;<a name="line.129"></a>
-<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.130"></a>
-<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.131"></a>
-<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.132"></a>
-<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.util.Strings;<a name="line.133"></a>
-<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.135"></a>
-<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.slf4j.Logger;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.slf4j.LoggerFactory;<a name="line.141"></a>
-<span class="sourceLineNo">142</span><a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hbase.thirdparty.com.google.common.cache.Cache;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hbase.thirdparty.com.google.common.cache.CacheBuilder;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hbase.thirdparty.com.google.protobuf.Message;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.152"></a>
+<span class="sourceLineNo">045</span>import org.apache.commons.lang3.mutable.MutableObject;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.conf.Configuration;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.fs.Path;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.ByteBufferExtendedCell;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.CacheEvictionStats;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.CacheEvictionStatsBuilder;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.Cell;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.CellScannable;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.HBaseIOException;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.HConstants;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.MultiActionResultTooLarge;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.Server;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.ServerName;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.TableName;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.client.Append;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.client.ConnectionUtils;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.client.Get;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.client.Put;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.client.Result;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.client.Row;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.client.VersionInfoUtil;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.conf.ConfigurationObserver;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.exceptions.ScannerResetException;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.90"></a>
+<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler;<a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.ipc.HBaseRpcController;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.ipc.PriorityFunction;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.ipc.QosPriority;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.ipc.RpcCallContext;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.ipc.RpcCallback;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.97"></a>
+<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface;<a name="line.98"></a>
+<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.ipc.RpcServerFactory;<a name="line.99"></a>
+<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.ipc.RpcServerInterface;<a name="line.100"></a>
+<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;<a name="line.101"></a>
+<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.ipc.ServerRpcController;<a name="line.102"></a>
+<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.log.HBaseMarkers;<a name="line.103"></a>
+<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.master.MasterRpcServices;<a name="line.104"></a>
+<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.net.Address;<a name="line.105"></a>
+<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.procedure2.RSProcedureCallable;<a name="line.106"></a>
+<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.quotas.ActivePolicyEnforcement;<a name="line.107"></a>
+<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.quotas.OperationQuota;<a name="line.108"></a>
+<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.quotas.QuotaUtil;<a name="line.109"></a>
+<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.quotas.RegionServerRpcQuotaManager;<a name="line.110"></a>
+<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.111"></a>
+<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot;<a name="line.112"></a>
+<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.quotas.SpaceViolationPolicyEnforcement;<a name="line.113"></a>
+<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.regionserver.HRegion.RegionScannerImpl;<a name="line.114"></a>
+<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.regionserver.Leases.Lease;<a name="line.115"></a>
+<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.regionserver.Leases.LeaseStillHeldException;<a name="line.116"></a>
+<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.regionserver.Region.Operation;<a name="line.117"></a>
+<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.118"></a>
+<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.119"></a>
+<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.regionserver.handler.OpenMetaHandler;<a name="line.120"></a>
+<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.regionserver.handler.OpenPriorityRegionHandler;<a name="line.121"></a>
+<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.regionserver.handler.OpenRegionHandler;<a name="line.122"></a>
+<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.security.Superusers;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.security.User;<a name="line.124"></a>
+<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.security.access.AccessChecker;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.security.access.Permission;<a name="line.126"></a>
+<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.127"></a>
+<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.util.DNS;<a name="line.128"></a>
+<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.129"></a>
+<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.130"></a>
+<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.131"></a>
+<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.util.Strings;<a name="line.132"></a>
+<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.133"></a>
+<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.134"></a>
+<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.135"></a>
+<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.136"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.slf4j.Logger;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.slf4j.LoggerFactory;<a name="line.140"></a>
+<span class="sourceLineNo">141</span><a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hbase.thirdparty.com.google.common.cache.Cache;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hbase.thirdparty.com.google.common.cache.CacheBuilder;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.hbase.thirdparty.com.google.protobuf.Message;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.152"></a>
 <span class="sourceLineNo">153</span><a name="line.153"></a>
 <span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.154"></a>
 <span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;<a name="line.155"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerShippedCallBack.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerShippedCallBack.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerShippedCallBack.html
index 7d1dba6..11f9915 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerShippedCallBack.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerShippedCallBack.html
@@ -50,114 +50,114 @@
 <span class="sourceLineNo">042</span>import java.util.concurrent.atomic.AtomicBoolean;<a name="line.42"></a>
 <span class="sourceLineNo">043</span>import java.util.concurrent.atomic.AtomicLong;<a name="line.43"></a>
 <span class="sourceLineNo">044</span>import java.util.concurrent.atomic.LongAdder;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.commons.collections.CollectionUtils;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.commons.lang3.mutable.MutableObject;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.conf.Configuration;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.fs.Path;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.ByteBufferExtendedCell;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.CacheEvictionStats;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.CacheEvictionStatsBuilder;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.Cell;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.CellScannable;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.HBaseIOException;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.HConstants;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.MultiActionResultTooLarge;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.Server;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.ServerName;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.TableName;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.client.Append;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.client.ConnectionUtils;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.client.Get;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.client.Put;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.client.Result;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.client.Row;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.client.VersionInfoUtil;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.conf.ConfigurationObserver;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.exceptions.ScannerResetException;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler;<a name="line.92"></a>
-<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.ipc.HBaseRpcController;<a name="line.93"></a>
-<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.ipc.PriorityFunction;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.ipc.QosPriority;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.ipc.RpcCallContext;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.ipc.RpcCallback;<a name="line.97"></a>
-<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.98"></a>
-<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.ipc.RpcServerFactory;<a name="line.100"></a>
-<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.ipc.RpcServerInterface;<a name="line.101"></a>
-<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;<a name="line.102"></a>
-<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.ipc.ServerRpcController;<a name="line.103"></a>
-<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.log.HBaseMarkers;<a name="line.104"></a>
-<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.master.MasterRpcServices;<a name="line.105"></a>
-<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.net.Address;<a name="line.106"></a>
-<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.procedure2.RSProcedureCallable;<a name="line.107"></a>
-<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.quotas.ActivePolicyEnforcement;<a name="line.108"></a>
-<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.quotas.OperationQuota;<a name="line.109"></a>
-<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.quotas.QuotaUtil;<a name="line.110"></a>
-<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.quotas.RegionServerRpcQuotaManager;<a name="line.111"></a>
-<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.112"></a>
-<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot;<a name="line.113"></a>
-<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.quotas.SpaceViolationPolicyEnforcement;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.regionserver.HRegion.RegionScannerImpl;<a name="line.115"></a>
-<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.regionserver.Leases.Lease;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.regionserver.Leases.LeaseStillHeldException;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.regionserver.Region.Operation;<a name="line.118"></a>
-<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.119"></a>
-<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.120"></a>
-<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.regionserver.handler.OpenMetaHandler;<a name="line.121"></a>
-<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.regionserver.handler.OpenPriorityRegionHandler;<a name="line.122"></a>
-<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.regionserver.handler.OpenRegionHandler;<a name="line.123"></a>
-<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.security.Superusers;<a name="line.124"></a>
-<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.security.User;<a name="line.125"></a>
-<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.security.access.AccessChecker;<a name="line.126"></a>
-<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.security.access.Permission;<a name="line.127"></a>
-<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.128"></a>
-<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.util.DNS;<a name="line.129"></a>
-<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.130"></a>
-<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.131"></a>
-<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.132"></a>
-<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.util.Strings;<a name="line.133"></a>
-<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.135"></a>
-<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.slf4j.Logger;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.slf4j.LoggerFactory;<a name="line.141"></a>
-<span class="sourceLineNo">142</span><a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hbase.thirdparty.com.google.common.cache.Cache;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hbase.thirdparty.com.google.common.cache.CacheBuilder;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hbase.thirdparty.com.google.protobuf.Message;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.152"></a>
+<span class="sourceLineNo">045</span>import org.apache.commons.lang3.mutable.MutableObject;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.conf.Configuration;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.fs.Path;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.ByteBufferExtendedCell;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.CacheEvictionStats;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.CacheEvictionStatsBuilder;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.Cell;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.CellScannable;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.HBaseIOException;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.HConstants;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.MultiActionResultTooLarge;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.Server;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.ServerName;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.TableName;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.client.Append;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.client.ConnectionUtils;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.client.Get;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.client.Put;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.client.Result;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.client.Row;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.client.VersionInfoUtil;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.conf.ConfigurationObserver;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.exceptions.ScannerResetException;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.90"></a>
+<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler;<a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.ipc.HBaseRpcController;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.ipc.PriorityFunction;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.ipc.QosPriority;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.ipc.RpcCallContext;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.ipc.RpcCallback;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.97"></a>
+<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface;<a name="line.98"></a>
+<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.ipc.RpcServerFactory;<a name="line.99"></a>
+<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.ipc.RpcServerInterface;<a name="line.100"></a>
+<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;<a name="line.101"></a>
+<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.ipc.ServerRpcController;<a name="line.102"></a>
+<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.log.HBaseMarkers;<a name="line.103"></a>
+<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.master.MasterRpcServices;<a name="line.104"></a>
+<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.net.Address;<a name="line.105"></a>
+<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.procedure2.RSProcedureCallable;<a name="line.106"></a>
+<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.quotas.ActivePolicyEnforcement;<a name="line.107"></a>
+<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.quotas.OperationQuota;<a name="line.108"></a>
+<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.quotas.QuotaUtil;<a name="line.109"></a>
+<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.quotas.RegionServerRpcQuotaManager;<a name="line.110"></a>
+<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.111"></a>
+<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot;<a name="line.112"></a>
+<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.quotas.SpaceViolationPolicyEnforcement;<a name="line.113"></a>
+<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.regionserver.HRegion.RegionScannerImpl;<a name="line.114"></a>
+<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.regionserver.Leases.Lease;<a name="line.115"></a>
+<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.regionserver.Leases.LeaseStillHeldException;<a name="line.116"></a>
+<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.regionserver.Region.Operation;<a name="line.117"></a>
+<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.118"></a>
+<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.119"></a>
+<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.regionserver.handler.OpenMetaHandler;<a name="line.120"></a>
+<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.regionserver.handler.OpenPriorityRegionHandler;<a name="line.121"></a>
+<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.regionserver.handler.OpenRegionHandler;<a name="line.122"></a>
+<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.security.Superusers;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.security.User;<a name="line.124"></a>
+<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.security.access.AccessChecker;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.security.access.Permission;<a name="line.126"></a>
+<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.127"></a>
+<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.util.DNS;<a name="line.128"></a>
+<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.129"></a>
+<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.130"></a>
+<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.131"></a>
+<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.util.Strings;<a name="line.132"></a>
+<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.133"></a>
+<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.134"></a>
+<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.135"></a>
+<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.136"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.slf4j.Logger;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.slf4j.LoggerFactory;<a name="line.140"></a>
+<span class="sourceLineNo">141</span><a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hbase.thirdparty.com.google.common.cache.Cache;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hbase.thirdparty.com.google.common.cache.CacheBuilder;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.hbase.thirdparty.com.google.protobuf.Message;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.152"></a>
 <span class="sourceLineNo">153</span><a name="line.153"></a>
 <span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.154"></a>
 <span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;<a name="line.155"></a>


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.CreateFailObserver.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.CreateFailObserver.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.CreateFailObserver.html
index c06f057..265908c 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.CreateFailObserver.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.CreateFailObserver.html
@@ -31,136 +31,137 @@
 <span class="sourceLineNo">023</span>import java.io.IOException;<a name="line.23"></a>
 <span class="sourceLineNo">024</span>import java.util.List;<a name="line.24"></a>
 <span class="sourceLineNo">025</span>import java.util.Optional;<a name="line.25"></a>
-<span class="sourceLineNo">026</span>import org.apache.commons.logging.Log;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import org.apache.commons.logging.LogFactory;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import org.apache.hadoop.conf.Configuration;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.HBaseClassTestRule;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.HBaseTestingUtility;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.TableName;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.coprocessor.MasterObserver;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.coprocessor.ObserverContext;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.procedure2.Procedure;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.security.AccessDeniedException;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.testclassification.MediumTests;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.junit.After;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.junit.BeforeClass;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.junit.ClassRule;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.junit.Test;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.junit.experimental.categories.Category;<a name="line.46"></a>
-<span class="sourceLineNo">047</span><a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;<a name="line.48"></a>
-<span class="sourceLineNo">049</span><a name="line.49"></a>
-<span class="sourceLineNo">050</span>/**<a name="line.50"></a>
-<span class="sourceLineNo">051</span> * Check if CompletedProcedureCleaner cleans up failed nonce procedures.<a name="line.51"></a>
-<span class="sourceLineNo">052</span> */<a name="line.52"></a>
-<span class="sourceLineNo">053</span>@Category(MediumTests.class)<a name="line.53"></a>
-<span class="sourceLineNo">054</span>public class TestFailedProcCleanup {<a name="line.54"></a>
-<span class="sourceLineNo">055</span><a name="line.55"></a>
-<span class="sourceLineNo">056</span>  @ClassRule<a name="line.56"></a>
-<span class="sourceLineNo">057</span>  public static final HBaseClassTestRule CLASS_RULE =<a name="line.57"></a>
-<span class="sourceLineNo">058</span>      HBaseClassTestRule.forClass(TestFailedProcCleanup.class);<a name="line.58"></a>
-<span class="sourceLineNo">059</span><a name="line.59"></a>
-<span class="sourceLineNo">060</span>  private static final Log LOG = LogFactory.getLog(TestFailedProcCleanup.class);<a name="line.60"></a>
-<span class="sourceLineNo">061</span><a name="line.61"></a>
-<span class="sourceLineNo">062</span>  protected static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();<a name="line.62"></a>
-<span class="sourceLineNo">063</span>  private static Configuration conf;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>  private static final TableName TABLE = TableName.valueOf("test");<a name="line.64"></a>
-<span class="sourceLineNo">065</span>  private static final byte[] FAMILY = Bytes.toBytesBinary("f");<a name="line.65"></a>
-<span class="sourceLineNo">066</span>  private static final int evictionDelay = 10 * 1000;<a name="line.66"></a>
-<span class="sourceLineNo">067</span><a name="line.67"></a>
-<span class="sourceLineNo">068</span>  @BeforeClass<a name="line.68"></a>
-<span class="sourceLineNo">069</span>  public static void setUpBeforeClass() {<a name="line.69"></a>
-<span class="sourceLineNo">070</span>    conf = TEST_UTIL.getConfiguration();<a name="line.70"></a>
-<span class="sourceLineNo">071</span>    conf.setInt("hbase.procedure.cleaner.evict.ttl", evictionDelay);<a name="line.71"></a>
-<span class="sourceLineNo">072</span>    conf.setInt("hbase.procedure.cleaner.evict.batch.size", 1);<a name="line.72"></a>
-<span class="sourceLineNo">073</span>  }<a name="line.73"></a>
-<span class="sourceLineNo">074</span><a name="line.74"></a>
-<span class="sourceLineNo">075</span>  @After<a name="line.75"></a>
-<span class="sourceLineNo">076</span>  public void tearDown() throws Exception {<a name="line.76"></a>
-<span class="sourceLineNo">077</span>    TEST_UTIL.shutdownMiniCluster();<a name="line.77"></a>
-<span class="sourceLineNo">078</span>  }<a name="line.78"></a>
-<span class="sourceLineNo">079</span><a name="line.79"></a>
-<span class="sourceLineNo">080</span>  @Test<a name="line.80"></a>
-<span class="sourceLineNo">081</span>  public void testFailCreateTable() throws Exception {<a name="line.81"></a>
-<span class="sourceLineNo">082</span>    conf.set(MASTER_COPROCESSOR_CONF_KEY, CreateFailObserver.class.getName());<a name="line.82"></a>
-<span class="sourceLineNo">083</span>    TEST_UTIL.startMiniCluster(3);<a name="line.83"></a>
-<span class="sourceLineNo">084</span>    try {<a name="line.84"></a>
-<span class="sourceLineNo">085</span>      TEST_UTIL.createTable(TABLE, FAMILY);<a name="line.85"></a>
-<span class="sourceLineNo">086</span>    } catch (AccessDeniedException e) {<a name="line.86"></a>
-<span class="sourceLineNo">087</span>      LOG.debug("Ignoring exception: ", e);<a name="line.87"></a>
-<span class="sourceLineNo">088</span>      Thread.sleep(evictionDelay * 3);<a name="line.88"></a>
-<span class="sourceLineNo">089</span>    }<a name="line.89"></a>
-<span class="sourceLineNo">090</span>    List&lt;Procedure&lt;?&gt;&gt; procedureInfos =<a name="line.90"></a>
-<span class="sourceLineNo">091</span>        TEST_UTIL.getMiniHBaseCluster().getMaster().getMasterProcedureExecutor().getProcedures();<a name="line.91"></a>
-<span class="sourceLineNo">092</span>    for (Procedure procedureInfo : procedureInfos) {<a name="line.92"></a>
-<span class="sourceLineNo">093</span>      if (procedureInfo.getProcName().equals("CreateTableProcedure")<a name="line.93"></a>
-<span class="sourceLineNo">094</span>          &amp;&amp; procedureInfo.getState() == ProcedureProtos.ProcedureState.ROLLEDBACK) {<a name="line.94"></a>
-<span class="sourceLineNo">095</span>        fail("Found procedure " + procedureInfo + " that hasn't been cleaned up");<a name="line.95"></a>
-<span class="sourceLineNo">096</span>      }<a name="line.96"></a>
-<span class="sourceLineNo">097</span>    }<a name="line.97"></a>
-<span class="sourceLineNo">098</span>  }<a name="line.98"></a>
-<span class="sourceLineNo">099</span><a name="line.99"></a>
-<span class="sourceLineNo">100</span>  @Test<a name="line.100"></a>
-<span class="sourceLineNo">101</span>  public void testFailCreateTableAction() throws Exception {<a name="line.101"></a>
-<span class="sourceLineNo">102</span>    conf.set(MASTER_COPROCESSOR_CONF_KEY, CreateFailObserverHandler.class.getName());<a name="line.102"></a>
-<span class="sourceLineNo">103</span>    TEST_UTIL.startMiniCluster(3);<a name="line.103"></a>
-<span class="sourceLineNo">104</span>    try {<a name="line.104"></a>
-<span class="sourceLineNo">105</span>      TEST_UTIL.createTable(TABLE, FAMILY);<a name="line.105"></a>
-<span class="sourceLineNo">106</span>      fail("Table shouldn't be created");<a name="line.106"></a>
-<span class="sourceLineNo">107</span>    } catch (AccessDeniedException e) {<a name="line.107"></a>
-<span class="sourceLineNo">108</span>      LOG.debug("Ignoring exception: ", e);<a name="line.108"></a>
-<span class="sourceLineNo">109</span>      Thread.sleep(evictionDelay * 3);<a name="line.109"></a>
-<span class="sourceLineNo">110</span>    }<a name="line.110"></a>
-<span class="sourceLineNo">111</span>    List&lt;Procedure&lt;?&gt;&gt; procedureInfos =<a name="line.111"></a>
-<span class="sourceLineNo">112</span>        TEST_UTIL.getMiniHBaseCluster().getMaster().getMasterProcedureExecutor().getProcedures();<a name="line.112"></a>
-<span class="sourceLineNo">113</span>    for (Procedure procedureInfo : procedureInfos) {<a name="line.113"></a>
-<span class="sourceLineNo">114</span>      if (procedureInfo.getProcName().equals("CreateTableProcedure")<a name="line.114"></a>
-<span class="sourceLineNo">115</span>          &amp;&amp; procedureInfo.getState() == ProcedureProtos.ProcedureState.ROLLEDBACK) {<a name="line.115"></a>
-<span class="sourceLineNo">116</span>        fail("Found procedure " + procedureInfo + " that hasn't been cleaned up");<a name="line.116"></a>
-<span class="sourceLineNo">117</span>      }<a name="line.117"></a>
-<span class="sourceLineNo">118</span>    }<a name="line.118"></a>
-<span class="sourceLineNo">119</span>  }<a name="line.119"></a>
-<span class="sourceLineNo">120</span><a name="line.120"></a>
-<span class="sourceLineNo">121</span>  public static class CreateFailObserver implements MasterCoprocessor, MasterObserver {<a name="line.121"></a>
-<span class="sourceLineNo">122</span><a name="line.122"></a>
-<span class="sourceLineNo">123</span>    @Override<a name="line.123"></a>
-<span class="sourceLineNo">124</span>    public void preCreateTable(ObserverContext&lt;MasterCoprocessorEnvironment&gt; env,<a name="line.124"></a>
-<span class="sourceLineNo">125</span>        TableDescriptor desc, RegionInfo[] regions) throws IOException {<a name="line.125"></a>
-<span class="sourceLineNo">126</span><a name="line.126"></a>
-<span class="sourceLineNo">127</span>      if (desc.getTableName().equals(TABLE)) {<a name="line.127"></a>
-<span class="sourceLineNo">128</span>        throw new AccessDeniedException("Don't allow creation of table");<a name="line.128"></a>
-<span class="sourceLineNo">129</span>      }<a name="line.129"></a>
-<span class="sourceLineNo">130</span>    }<a name="line.130"></a>
-<span class="sourceLineNo">131</span><a name="line.131"></a>
-<span class="sourceLineNo">132</span>    @Override<a name="line.132"></a>
-<span class="sourceLineNo">133</span>    public Optional&lt;MasterObserver&gt; getMasterObserver() {<a name="line.133"></a>
-<span class="sourceLineNo">134</span>      return Optional.of(this);<a name="line.134"></a>
-<span class="sourceLineNo">135</span>    }<a name="line.135"></a>
-<span class="sourceLineNo">136</span>  }<a name="line.136"></a>
-<span class="sourceLineNo">137</span><a name="line.137"></a>
-<span class="sourceLineNo">138</span>  public static class CreateFailObserverHandler implements MasterCoprocessor, MasterObserver {<a name="line.138"></a>
-<span class="sourceLineNo">139</span><a name="line.139"></a>
-<span class="sourceLineNo">140</span>    @Override<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    public void preCreateTableAction(<a name="line.141"></a>
-<span class="sourceLineNo">142</span>        final ObserverContext&lt;MasterCoprocessorEnvironment&gt; ctx, final TableDescriptor desc,<a name="line.142"></a>
-<span class="sourceLineNo">143</span>        final RegionInfo[] regions) throws IOException {<a name="line.143"></a>
-<span class="sourceLineNo">144</span><a name="line.144"></a>
-<span class="sourceLineNo">145</span>      if (desc.getTableName().equals(TABLE)) {<a name="line.145"></a>
-<span class="sourceLineNo">146</span>        throw new AccessDeniedException("Don't allow creation of table");<a name="line.146"></a>
-<span class="sourceLineNo">147</span>      }<a name="line.147"></a>
-<span class="sourceLineNo">148</span>    }<a name="line.148"></a>
-<span class="sourceLineNo">149</span><a name="line.149"></a>
-<span class="sourceLineNo">150</span>    @Override<a name="line.150"></a>
-<span class="sourceLineNo">151</span>    public Optional&lt;MasterObserver&gt; getMasterObserver() {<a name="line.151"></a>
-<span class="sourceLineNo">152</span>      return Optional.of(this);<a name="line.152"></a>
-<span class="sourceLineNo">153</span>    }<a name="line.153"></a>
-<span class="sourceLineNo">154</span>  }<a name="line.154"></a>
-<span class="sourceLineNo">155</span>}<a name="line.155"></a>
+<span class="sourceLineNo">026</span>import org.apache.hadoop.conf.Configuration;<a name="line.26"></a>
+<span class="sourceLineNo">027</span>import org.apache.hadoop.hbase.HBaseClassTestRule;<a name="line.27"></a>
+<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.HBaseTestingUtility;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.TableName;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.coprocessor.MasterObserver;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.coprocessor.ObserverContext;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.procedure2.Procedure;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.security.AccessDeniedException;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.testclassification.MediumTests;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.junit.After;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.junit.BeforeClass;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.junit.ClassRule;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.junit.Test;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.junit.experimental.categories.Category;<a name="line.44"></a>
+<span class="sourceLineNo">045</span><a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.slf4j.Logger;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.slf4j.LoggerFactory;<a name="line.47"></a>
+<span class="sourceLineNo">048</span><a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;<a name="line.49"></a>
+<span class="sourceLineNo">050</span><a name="line.50"></a>
+<span class="sourceLineNo">051</span>/**<a name="line.51"></a>
+<span class="sourceLineNo">052</span> * Check if CompletedProcedureCleaner cleans up failed nonce procedures.<a name="line.52"></a>
+<span class="sourceLineNo">053</span> */<a name="line.53"></a>
+<span class="sourceLineNo">054</span>@Category(MediumTests.class)<a name="line.54"></a>
+<span class="sourceLineNo">055</span>public class TestFailedProcCleanup {<a name="line.55"></a>
+<span class="sourceLineNo">056</span><a name="line.56"></a>
+<span class="sourceLineNo">057</span>  @ClassRule<a name="line.57"></a>
+<span class="sourceLineNo">058</span>  public static final HBaseClassTestRule CLASS_RULE =<a name="line.58"></a>
+<span class="sourceLineNo">059</span>      HBaseClassTestRule.forClass(TestFailedProcCleanup.class);<a name="line.59"></a>
+<span class="sourceLineNo">060</span><a name="line.60"></a>
+<span class="sourceLineNo">061</span>  private static final Logger LOG = LoggerFactory.getLogger(TestFailedProcCleanup.class);<a name="line.61"></a>
+<span class="sourceLineNo">062</span><a name="line.62"></a>
+<span class="sourceLineNo">063</span>  protected static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();<a name="line.63"></a>
+<span class="sourceLineNo">064</span>  private static Configuration conf;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>  private static final TableName TABLE = TableName.valueOf("test");<a name="line.65"></a>
+<span class="sourceLineNo">066</span>  private static final byte[] FAMILY = Bytes.toBytesBinary("f");<a name="line.66"></a>
+<span class="sourceLineNo">067</span>  private static final int evictionDelay = 10 * 1000;<a name="line.67"></a>
+<span class="sourceLineNo">068</span><a name="line.68"></a>
+<span class="sourceLineNo">069</span>  @BeforeClass<a name="line.69"></a>
+<span class="sourceLineNo">070</span>  public static void setUpBeforeClass() {<a name="line.70"></a>
+<span class="sourceLineNo">071</span>    conf = TEST_UTIL.getConfiguration();<a name="line.71"></a>
+<span class="sourceLineNo">072</span>    conf.setInt("hbase.procedure.cleaner.evict.ttl", evictionDelay);<a name="line.72"></a>
+<span class="sourceLineNo">073</span>    conf.setInt("hbase.procedure.cleaner.evict.batch.size", 1);<a name="line.73"></a>
+<span class="sourceLineNo">074</span>  }<a name="line.74"></a>
+<span class="sourceLineNo">075</span><a name="line.75"></a>
+<span class="sourceLineNo">076</span>  @After<a name="line.76"></a>
+<span class="sourceLineNo">077</span>  public void tearDown() throws Exception {<a name="line.77"></a>
+<span class="sourceLineNo">078</span>    TEST_UTIL.shutdownMiniCluster();<a name="line.78"></a>
+<span class="sourceLineNo">079</span>  }<a name="line.79"></a>
+<span class="sourceLineNo">080</span><a name="line.80"></a>
+<span class="sourceLineNo">081</span>  @Test<a name="line.81"></a>
+<span class="sourceLineNo">082</span>  public void testFailCreateTable() throws Exception {<a name="line.82"></a>
+<span class="sourceLineNo">083</span>    conf.set(MASTER_COPROCESSOR_CONF_KEY, CreateFailObserver.class.getName());<a name="line.83"></a>
+<span class="sourceLineNo">084</span>    TEST_UTIL.startMiniCluster(3);<a name="line.84"></a>
+<span class="sourceLineNo">085</span>    try {<a name="line.85"></a>
+<span class="sourceLineNo">086</span>      TEST_UTIL.createTable(TABLE, FAMILY);<a name="line.86"></a>
+<span class="sourceLineNo">087</span>    } catch (AccessDeniedException e) {<a name="line.87"></a>
+<span class="sourceLineNo">088</span>      LOG.debug("Ignoring exception: ", e);<a name="line.88"></a>
+<span class="sourceLineNo">089</span>      Thread.sleep(evictionDelay * 3);<a name="line.89"></a>
+<span class="sourceLineNo">090</span>    }<a name="line.90"></a>
+<span class="sourceLineNo">091</span>    List&lt;Procedure&lt;?&gt;&gt; procedureInfos =<a name="line.91"></a>
+<span class="sourceLineNo">092</span>        TEST_UTIL.getMiniHBaseCluster().getMaster().getMasterProcedureExecutor().getProcedures();<a name="line.92"></a>
+<span class="sourceLineNo">093</span>    for (Procedure procedureInfo : procedureInfos) {<a name="line.93"></a>
+<span class="sourceLineNo">094</span>      if (procedureInfo.getProcName().equals("CreateTableProcedure")<a name="line.94"></a>
+<span class="sourceLineNo">095</span>          &amp;&amp; procedureInfo.getState() == ProcedureProtos.ProcedureState.ROLLEDBACK) {<a name="line.95"></a>
+<span class="sourceLineNo">096</span>        fail("Found procedure " + procedureInfo + " that hasn't been cleaned up");<a name="line.96"></a>
+<span class="sourceLineNo">097</span>      }<a name="line.97"></a>
+<span class="sourceLineNo">098</span>    }<a name="line.98"></a>
+<span class="sourceLineNo">099</span>  }<a name="line.99"></a>
+<span class="sourceLineNo">100</span><a name="line.100"></a>
+<span class="sourceLineNo">101</span>  @Test<a name="line.101"></a>
+<span class="sourceLineNo">102</span>  public void testFailCreateTableAction() throws Exception {<a name="line.102"></a>
+<span class="sourceLineNo">103</span>    conf.set(MASTER_COPROCESSOR_CONF_KEY, CreateFailObserverHandler.class.getName());<a name="line.103"></a>
+<span class="sourceLineNo">104</span>    TEST_UTIL.startMiniCluster(3);<a name="line.104"></a>
+<span class="sourceLineNo">105</span>    try {<a name="line.105"></a>
+<span class="sourceLineNo">106</span>      TEST_UTIL.createTable(TABLE, FAMILY);<a name="line.106"></a>
+<span class="sourceLineNo">107</span>      fail("Table shouldn't be created");<a name="line.107"></a>
+<span class="sourceLineNo">108</span>    } catch (AccessDeniedException e) {<a name="line.108"></a>
+<span class="sourceLineNo">109</span>      LOG.debug("Ignoring exception: ", e);<a name="line.109"></a>
+<span class="sourceLineNo">110</span>      Thread.sleep(evictionDelay * 3);<a name="line.110"></a>
+<span class="sourceLineNo">111</span>    }<a name="line.111"></a>
+<span class="sourceLineNo">112</span>    List&lt;Procedure&lt;?&gt;&gt; procedureInfos =<a name="line.112"></a>
+<span class="sourceLineNo">113</span>        TEST_UTIL.getMiniHBaseCluster().getMaster().getMasterProcedureExecutor().getProcedures();<a name="line.113"></a>
+<span class="sourceLineNo">114</span>    for (Procedure procedureInfo : procedureInfos) {<a name="line.114"></a>
+<span class="sourceLineNo">115</span>      if (procedureInfo.getProcName().equals("CreateTableProcedure")<a name="line.115"></a>
+<span class="sourceLineNo">116</span>          &amp;&amp; procedureInfo.getState() == ProcedureProtos.ProcedureState.ROLLEDBACK) {<a name="line.116"></a>
+<span class="sourceLineNo">117</span>        fail("Found procedure " + procedureInfo + " that hasn't been cleaned up");<a name="line.117"></a>
+<span class="sourceLineNo">118</span>      }<a name="line.118"></a>
+<span class="sourceLineNo">119</span>    }<a name="line.119"></a>
+<span class="sourceLineNo">120</span>  }<a name="line.120"></a>
+<span class="sourceLineNo">121</span><a name="line.121"></a>
+<span class="sourceLineNo">122</span>  public static class CreateFailObserver implements MasterCoprocessor, MasterObserver {<a name="line.122"></a>
+<span class="sourceLineNo">123</span><a name="line.123"></a>
+<span class="sourceLineNo">124</span>    @Override<a name="line.124"></a>
+<span class="sourceLineNo">125</span>    public void preCreateTable(ObserverContext&lt;MasterCoprocessorEnvironment&gt; env,<a name="line.125"></a>
+<span class="sourceLineNo">126</span>        TableDescriptor desc, RegionInfo[] regions) throws IOException {<a name="line.126"></a>
+<span class="sourceLineNo">127</span><a name="line.127"></a>
+<span class="sourceLineNo">128</span>      if (desc.getTableName().equals(TABLE)) {<a name="line.128"></a>
+<span class="sourceLineNo">129</span>        throw new AccessDeniedException("Don't allow creation of table");<a name="line.129"></a>
+<span class="sourceLineNo">130</span>      }<a name="line.130"></a>
+<span class="sourceLineNo">131</span>    }<a name="line.131"></a>
+<span class="sourceLineNo">132</span><a name="line.132"></a>
+<span class="sourceLineNo">133</span>    @Override<a name="line.133"></a>
+<span class="sourceLineNo">134</span>    public Optional&lt;MasterObserver&gt; getMasterObserver() {<a name="line.134"></a>
+<span class="sourceLineNo">135</span>      return Optional.of(this);<a name="line.135"></a>
+<span class="sourceLineNo">136</span>    }<a name="line.136"></a>
+<span class="sourceLineNo">137</span>  }<a name="line.137"></a>
+<span class="sourceLineNo">138</span><a name="line.138"></a>
+<span class="sourceLineNo">139</span>  public static class CreateFailObserverHandler implements MasterCoprocessor, MasterObserver {<a name="line.139"></a>
+<span class="sourceLineNo">140</span><a name="line.140"></a>
+<span class="sourceLineNo">141</span>    @Override<a name="line.141"></a>
+<span class="sourceLineNo">142</span>    public void preCreateTableAction(<a name="line.142"></a>
+<span class="sourceLineNo">143</span>        final ObserverContext&lt;MasterCoprocessorEnvironment&gt; ctx, final TableDescriptor desc,<a name="line.143"></a>
+<span class="sourceLineNo">144</span>        final RegionInfo[] regions) throws IOException {<a name="line.144"></a>
+<span class="sourceLineNo">145</span><a name="line.145"></a>
+<span class="sourceLineNo">146</span>      if (desc.getTableName().equals(TABLE)) {<a name="line.146"></a>
+<span class="sourceLineNo">147</span>        throw new AccessDeniedException("Don't allow creation of table");<a name="line.147"></a>
+<span class="sourceLineNo">148</span>      }<a name="line.148"></a>
+<span class="sourceLineNo">149</span>    }<a name="line.149"></a>
+<span class="sourceLineNo">150</span><a name="line.150"></a>
+<span class="sourceLineNo">151</span>    @Override<a name="line.151"></a>
+<span class="sourceLineNo">152</span>    public Optional&lt;MasterObserver&gt; getMasterObserver() {<a name="line.152"></a>
+<span class="sourceLineNo">153</span>      return Optional.of(this);<a name="line.153"></a>
+<span class="sourceLineNo">154</span>    }<a name="line.154"></a>
+<span class="sourceLineNo">155</span>  }<a name="line.155"></a>
+<span class="sourceLineNo">156</span>}<a name="line.156"></a>
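
The diff above is TestFailedProcCleanup switching from commons-logging
(Log/LogFactory) to slf4j (Logger/LoggerFactory). A minimal sketch of that
migration pattern, with an illustrative class name that is not from this
commit: only the logger field and its two imports change, while call sites
such as LOG.debug("Ignoring exception: ", e) compile unchanged against both
APIs.

    // Before: commons-logging
    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;

    public class LoggingSketch {
      private static final Log LOG = LogFactory.getLog(LoggingSketch.class);
    }

    // After: slf4j
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class LoggingSketch {
      private static final Logger LOG = LoggerFactory.getLogger(LoggingSketch.class);
    }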
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.CreateFailObserverHandler.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.CreateFailObserverHandler.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.CreateFailObserverHandler.html
index c06f057..265908c 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.CreateFailObserverHandler.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.CreateFailObserverHandler.html
@@ -31,136 +31,137 @@
 <span class="sourceLineNo">023</span>import java.io.IOException;<a name="line.23"></a>
 <span class="sourceLineNo">024</span>import java.util.List;<a name="line.24"></a>
 <span class="sourceLineNo">025</span>import java.util.Optional;<a name="line.25"></a>
-<span class="sourceLineNo">026</span>import org.apache.commons.logging.Log;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import org.apache.commons.logging.LogFactory;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import org.apache.hadoop.conf.Configuration;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.HBaseClassTestRule;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.HBaseTestingUtility;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.TableName;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.coprocessor.MasterObserver;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.coprocessor.ObserverContext;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.procedure2.Procedure;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.security.AccessDeniedException;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.testclassification.MediumTests;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.junit.After;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.junit.BeforeClass;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.junit.ClassRule;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.junit.Test;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.junit.experimental.categories.Category;<a name="line.46"></a>
-<span class="sourceLineNo">047</span><a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;<a name="line.48"></a>
-<span class="sourceLineNo">049</span><a name="line.49"></a>
-<span class="sourceLineNo">050</span>/**<a name="line.50"></a>
-<span class="sourceLineNo">051</span> * Check if CompletedProcedureCleaner cleans up failed nonce procedures.<a name="line.51"></a>
-<span class="sourceLineNo">052</span> */<a name="line.52"></a>
-<span class="sourceLineNo">053</span>@Category(MediumTests.class)<a name="line.53"></a>
-<span class="sourceLineNo">054</span>public class TestFailedProcCleanup {<a name="line.54"></a>
-<span class="sourceLineNo">055</span><a name="line.55"></a>
-<span class="sourceLineNo">056</span>  @ClassRule<a name="line.56"></a>
-<span class="sourceLineNo">057</span>  public static final HBaseClassTestRule CLASS_RULE =<a name="line.57"></a>
-<span class="sourceLineNo">058</span>      HBaseClassTestRule.forClass(TestFailedProcCleanup.class);<a name="line.58"></a>
-<span class="sourceLineNo">059</span><a name="line.59"></a>
-<span class="sourceLineNo">060</span>  private static final Log LOG = LogFactory.getLog(TestFailedProcCleanup.class);<a name="line.60"></a>
-<span class="sourceLineNo">061</span><a name="line.61"></a>
-<span class="sourceLineNo">062</span>  protected static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();<a name="line.62"></a>
-<span class="sourceLineNo">063</span>  private static Configuration conf;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>  private static final TableName TABLE = TableName.valueOf("test");<a name="line.64"></a>
-<span class="sourceLineNo">065</span>  private static final byte[] FAMILY = Bytes.toBytesBinary("f");<a name="line.65"></a>
-<span class="sourceLineNo">066</span>  private static final int evictionDelay = 10 * 1000;<a name="line.66"></a>
-<span class="sourceLineNo">067</span><a name="line.67"></a>
-<span class="sourceLineNo">068</span>  @BeforeClass<a name="line.68"></a>
-<span class="sourceLineNo">069</span>  public static void setUpBeforeClass() {<a name="line.69"></a>
-<span class="sourceLineNo">070</span>    conf = TEST_UTIL.getConfiguration();<a name="line.70"></a>
-<span class="sourceLineNo">071</span>    conf.setInt("hbase.procedure.cleaner.evict.ttl", evictionDelay);<a name="line.71"></a>
-<span class="sourceLineNo">072</span>    conf.setInt("hbase.procedure.cleaner.evict.batch.size", 1);<a name="line.72"></a>
-<span class="sourceLineNo">073</span>  }<a name="line.73"></a>
-<span class="sourceLineNo">074</span><a name="line.74"></a>
-<span class="sourceLineNo">075</span>  @After<a name="line.75"></a>
-<span class="sourceLineNo">076</span>  public void tearDown() throws Exception {<a name="line.76"></a>
-<span class="sourceLineNo">077</span>    TEST_UTIL.shutdownMiniCluster();<a name="line.77"></a>
-<span class="sourceLineNo">078</span>  }<a name="line.78"></a>
-<span class="sourceLineNo">079</span><a name="line.79"></a>
-<span class="sourceLineNo">080</span>  @Test<a name="line.80"></a>
-<span class="sourceLineNo">081</span>  public void testFailCreateTable() throws Exception {<a name="line.81"></a>
-<span class="sourceLineNo">082</span>    conf.set(MASTER_COPROCESSOR_CONF_KEY, CreateFailObserver.class.getName());<a name="line.82"></a>
-<span class="sourceLineNo">083</span>    TEST_UTIL.startMiniCluster(3);<a name="line.83"></a>
-<span class="sourceLineNo">084</span>    try {<a name="line.84"></a>
-<span class="sourceLineNo">085</span>      TEST_UTIL.createTable(TABLE, FAMILY);<a name="line.85"></a>
-<span class="sourceLineNo">086</span>    } catch (AccessDeniedException e) {<a name="line.86"></a>
-<span class="sourceLineNo">087</span>      LOG.debug("Ignoring exception: ", e);<a name="line.87"></a>
-<span class="sourceLineNo">088</span>      Thread.sleep(evictionDelay * 3);<a name="line.88"></a>
-<span class="sourceLineNo">089</span>    }<a name="line.89"></a>
-<span class="sourceLineNo">090</span>    List&lt;Procedure&lt;?&gt;&gt; procedureInfos =<a name="line.90"></a>
-<span class="sourceLineNo">091</span>        TEST_UTIL.getMiniHBaseCluster().getMaster().getMasterProcedureExecutor().getProcedures();<a name="line.91"></a>
-<span class="sourceLineNo">092</span>    for (Procedure procedureInfo : procedureInfos) {<a name="line.92"></a>
-<span class="sourceLineNo">093</span>      if (procedureInfo.getProcName().equals("CreateTableProcedure")<a name="line.93"></a>
-<span class="sourceLineNo">094</span>          &amp;&amp; procedureInfo.getState() == ProcedureProtos.ProcedureState.ROLLEDBACK) {<a name="line.94"></a>
-<span class="sourceLineNo">095</span>        fail("Found procedure " + procedureInfo + " that hasn't been cleaned up");<a name="line.95"></a>
-<span class="sourceLineNo">096</span>      }<a name="line.96"></a>
-<span class="sourceLineNo">097</span>    }<a name="line.97"></a>
-<span class="sourceLineNo">098</span>  }<a name="line.98"></a>
-<span class="sourceLineNo">099</span><a name="line.99"></a>
-<span class="sourceLineNo">100</span>  @Test<a name="line.100"></a>
-<span class="sourceLineNo">101</span>  public void testFailCreateTableAction() throws Exception {<a name="line.101"></a>
-<span class="sourceLineNo">102</span>    conf.set(MASTER_COPROCESSOR_CONF_KEY, CreateFailObserverHandler.class.getName());<a name="line.102"></a>
-<span class="sourceLineNo">103</span>    TEST_UTIL.startMiniCluster(3);<a name="line.103"></a>
-<span class="sourceLineNo">104</span>    try {<a name="line.104"></a>
-<span class="sourceLineNo">105</span>      TEST_UTIL.createTable(TABLE, FAMILY);<a name="line.105"></a>
-<span class="sourceLineNo">106</span>      fail("Table shouldn't be created");<a name="line.106"></a>
-<span class="sourceLineNo">107</span>    } catch (AccessDeniedException e) {<a name="line.107"></a>
-<span class="sourceLineNo">108</span>      LOG.debug("Ignoring exception: ", e);<a name="line.108"></a>
-<span class="sourceLineNo">109</span>      Thread.sleep(evictionDelay * 3);<a name="line.109"></a>
-<span class="sourceLineNo">110</span>    }<a name="line.110"></a>
-<span class="sourceLineNo">111</span>    List&lt;Procedure&lt;?&gt;&gt; procedureInfos =<a name="line.111"></a>
-<span class="sourceLineNo">112</span>        TEST_UTIL.getMiniHBaseCluster().getMaster().getMasterProcedureExecutor().getProcedures();<a name="line.112"></a>
-<span class="sourceLineNo">113</span>    for (Procedure procedureInfo : procedureInfos) {<a name="line.113"></a>
-<span class="sourceLineNo">114</span>      if (procedureInfo.getProcName().equals("CreateTableProcedure")<a name="line.114"></a>
-<span class="sourceLineNo">115</span>          &amp;&amp; procedureInfo.getState() == ProcedureProtos.ProcedureState.ROLLEDBACK) {<a name="line.115"></a>
-<span class="sourceLineNo">116</span>        fail("Found procedure " + procedureInfo + " that hasn't been cleaned up");<a name="line.116"></a>
-<span class="sourceLineNo">117</span>      }<a name="line.117"></a>
-<span class="sourceLineNo">118</span>    }<a name="line.118"></a>
-<span class="sourceLineNo">119</span>  }<a name="line.119"></a>
-<span class="sourceLineNo">120</span><a name="line.120"></a>
-<span class="sourceLineNo">121</span>  public static class CreateFailObserver implements MasterCoprocessor, MasterObserver {<a name="line.121"></a>
-<span class="sourceLineNo">122</span><a name="line.122"></a>
-<span class="sourceLineNo">123</span>    @Override<a name="line.123"></a>
-<span class="sourceLineNo">124</span>    public void preCreateTable(ObserverContext&lt;MasterCoprocessorEnvironment&gt; env,<a name="line.124"></a>
-<span class="sourceLineNo">125</span>        TableDescriptor desc, RegionInfo[] regions) throws IOException {<a name="line.125"></a>
-<span class="sourceLineNo">126</span><a name="line.126"></a>
-<span class="sourceLineNo">127</span>      if (desc.getTableName().equals(TABLE)) {<a name="line.127"></a>
-<span class="sourceLineNo">128</span>        throw new AccessDeniedException("Don't allow creation of table");<a name="line.128"></a>
-<span class="sourceLineNo">129</span>      }<a name="line.129"></a>
-<span class="sourceLineNo">130</span>    }<a name="line.130"></a>
-<span class="sourceLineNo">131</span><a name="line.131"></a>
-<span class="sourceLineNo">132</span>    @Override<a name="line.132"></a>
-<span class="sourceLineNo">133</span>    public Optional&lt;MasterObserver&gt; getMasterObserver() {<a name="line.133"></a>
-<span class="sourceLineNo">134</span>      return Optional.of(this);<a name="line.134"></a>
-<span class="sourceLineNo">135</span>    }<a name="line.135"></a>
-<span class="sourceLineNo">136</span>  }<a name="line.136"></a>
-<span class="sourceLineNo">137</span><a name="line.137"></a>
-<span class="sourceLineNo">138</span>  public static class CreateFailObserverHandler implements MasterCoprocessor, MasterObserver {<a name="line.138"></a>
-<span class="sourceLineNo">139</span><a name="line.139"></a>
-<span class="sourceLineNo">140</span>    @Override<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    public void preCreateTableAction(<a name="line.141"></a>
-<span class="sourceLineNo">142</span>        final ObserverContext&lt;MasterCoprocessorEnvironment&gt; ctx, final TableDescriptor desc,<a name="line.142"></a>
-<span class="sourceLineNo">143</span>        final RegionInfo[] regions) throws IOException {<a name="line.143"></a>
-<span class="sourceLineNo">144</span><a name="line.144"></a>
-<span class="sourceLineNo">145</span>      if (desc.getTableName().equals(TABLE)) {<a name="line.145"></a>
-<span class="sourceLineNo">146</span>        throw new AccessDeniedException("Don't allow creation of table");<a name="line.146"></a>
-<span class="sourceLineNo">147</span>      }<a name="line.147"></a>
-<span class="sourceLineNo">148</span>    }<a name="line.148"></a>
-<span class="sourceLineNo">149</span><a name="line.149"></a>
-<span class="sourceLineNo">150</span>    @Override<a name="line.150"></a>
-<span class="sourceLineNo">151</span>    public Optional&lt;MasterObserver&gt; getMasterObserver() {<a name="line.151"></a>
-<span class="sourceLineNo">152</span>      return Optional.of(this);<a name="line.152"></a>
-<span class="sourceLineNo">153</span>    }<a name="line.153"></a>
-<span class="sourceLineNo">154</span>  }<a name="line.154"></a>
-<span class="sourceLineNo">155</span>}<a name="line.155"></a>
+<span class="sourceLineNo">026</span>import org.apache.hadoop.conf.Configuration;<a name="line.26"></a>
+<span class="sourceLineNo">027</span>import org.apache.hadoop.hbase.HBaseClassTestRule;<a name="line.27"></a>
+<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.HBaseTestingUtility;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.TableName;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.coprocessor.MasterObserver;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.coprocessor.ObserverContext;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.procedure2.Procedure;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.security.AccessDeniedException;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.testclassification.MediumTests;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.junit.After;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.junit.BeforeClass;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.junit.ClassRule;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.junit.Test;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.junit.experimental.categories.Category;<a name="line.44"></a>
+<span class="sourceLineNo">045</span><a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.slf4j.Logger;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.slf4j.LoggerFactory;<a name="line.47"></a>
+<span class="sourceLineNo">048</span><a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;<a name="line.49"></a>
+<span class="sourceLineNo">050</span><a name="line.50"></a>
+<span class="sourceLineNo">051</span>/**<a name="line.51"></a>
+<span class="sourceLineNo">052</span> * Check if CompletedProcedureCleaner cleans up failed nonce procedures.<a name="line.52"></a>
+<span class="sourceLineNo">053</span> */<a name="line.53"></a>
+<span class="sourceLineNo">054</span>@Category(MediumTests.class)<a name="line.54"></a>
+<span class="sourceLineNo">055</span>public class TestFailedProcCleanup {<a name="line.55"></a>
+<span class="sourceLineNo">056</span><a name="line.56"></a>
+<span class="sourceLineNo">057</span>  @ClassRule<a name="line.57"></a>
+<span class="sourceLineNo">058</span>  public static final HBaseClassTestRule CLASS_RULE =<a name="line.58"></a>
+<span class="sourceLineNo">059</span>      HBaseClassTestRule.forClass(TestFailedProcCleanup.class);<a name="line.59"></a>
+<span class="sourceLineNo">060</span><a name="line.60"></a>
+<span class="sourceLineNo">061</span>  private static final Logger LOG = LoggerFactory.getLogger(TestFailedProcCleanup.class);<a name="line.61"></a>
+<span class="sourceLineNo">062</span><a name="line.62"></a>
+<span class="sourceLineNo">063</span>  protected static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();<a name="line.63"></a>
+<span class="sourceLineNo">064</span>  private static Configuration conf;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>  private static final TableName TABLE = TableName.valueOf("test");<a name="line.65"></a>
+<span class="sourceLineNo">066</span>  private static final byte[] FAMILY = Bytes.toBytesBinary("f");<a name="line.66"></a>
+<span class="sourceLineNo">067</span>  private static final int evictionDelay = 10 * 1000;<a name="line.67"></a>
+<span class="sourceLineNo">068</span><a name="line.68"></a>
+<span class="sourceLineNo">069</span>  @BeforeClass<a name="line.69"></a>
+<span class="sourceLineNo">070</span>  public static void setUpBeforeClass() {<a name="line.70"></a>
+<span class="sourceLineNo">071</span>    conf = TEST_UTIL.getConfiguration();<a name="line.71"></a>
+<span class="sourceLineNo">072</span>    conf.setInt("hbase.procedure.cleaner.evict.ttl", evictionDelay);<a name="line.72"></a>
+<span class="sourceLineNo">073</span>    conf.setInt("hbase.procedure.cleaner.evict.batch.size", 1);<a name="line.73"></a>
+<span class="sourceLineNo">074</span>  }<a name="line.74"></a>
+<span class="sourceLineNo">075</span><a name="line.75"></a>
+<span class="sourceLineNo">076</span>  @After<a name="line.76"></a>
+<span class="sourceLineNo">077</span>  public void tearDown() throws Exception {<a name="line.77"></a>
+<span class="sourceLineNo">078</span>    TEST_UTIL.shutdownMiniCluster();<a name="line.78"></a>
+<span class="sourceLineNo">079</span>  }<a name="line.79"></a>
+<span class="sourceLineNo">080</span><a name="line.80"></a>
+<span class="sourceLineNo">081</span>  @Test<a name="line.81"></a>
+<span class="sourceLineNo">082</span>  public void testFailCreateTable() throws Exception {<a name="line.82"></a>
+<span class="sourceLineNo">083</span>    conf.set(MASTER_COPROCESSOR_CONF_KEY, CreateFailObserver.class.getName());<a name="line.83"></a>
+<span class="sourceLineNo">084</span>    TEST_UTIL.startMiniCluster(3);<a name="line.84"></a>
+<span class="sourceLineNo">085</span>    try {<a name="line.85"></a>
+<span class="sourceLineNo">086</span>      TEST_UTIL.createTable(TABLE, FAMILY);<a name="line.86"></a>
+<span class="sourceLineNo">087</span>    } catch (AccessDeniedException e) {<a name="line.87"></a>
+<span class="sourceLineNo">088</span>      LOG.debug("Ignoring exception: ", e);<a name="line.88"></a>
+<span class="sourceLineNo">089</span>      Thread.sleep(evictionDelay * 3);<a name="line.89"></a>
+<span class="sourceLineNo">090</span>    }<a name="line.90"></a>
+<span class="sourceLineNo">091</span>    List&lt;Procedure&lt;?&gt;&gt; procedureInfos =<a name="line.91"></a>
+<span class="sourceLineNo">092</span>        TEST_UTIL.getMiniHBaseCluster().getMaster().getMasterProcedureExecutor().getProcedures();<a name="line.92"></a>
+<span class="sourceLineNo">093</span>    for (Procedure procedureInfo : procedureInfos) {<a name="line.93"></a>
+<span class="sourceLineNo">094</span>      if (procedureInfo.getProcName().equals("CreateTableProcedure")<a name="line.94"></a>
+<span class="sourceLineNo">095</span>          &amp;&amp; procedureInfo.getState() == ProcedureProtos.ProcedureState.ROLLEDBACK) {<a name="line.95"></a>
+<span class="sourceLineNo">096</span>        fail("Found procedure " + procedureInfo + " that hasn't been cleaned up");<a name="line.96"></a>
+<span class="sourceLineNo">097</span>      }<a name="line.97"></a>
+<span class="sourceLineNo">098</span>    }<a name="line.98"></a>
+<span class="sourceLineNo">099</span>  }<a name="line.99"></a>
+<span class="sourceLineNo">100</span><a name="line.100"></a>
+<span class="sourceLineNo">101</span>  @Test<a name="line.101"></a>
+<span class="sourceLineNo">102</span>  public void testFailCreateTableAction() throws Exception {<a name="line.102"></a>
+<span class="sourceLineNo">103</span>    conf.set(MASTER_COPROCESSOR_CONF_KEY, CreateFailObserverHandler.class.getName());<a name="line.103"></a>
+<span class="sourceLineNo">104</span>    TEST_UTIL.startMiniCluster(3);<a name="line.104"></a>
+<span class="sourceLineNo">105</span>    try {<a name="line.105"></a>
+<span class="sourceLineNo">106</span>      TEST_UTIL.createTable(TABLE, FAMILY);<a name="line.106"></a>
+<span class="sourceLineNo">107</span>      fail("Table shouldn't be created");<a name="line.107"></a>
+<span class="sourceLineNo">108</span>    } catch (AccessDeniedException e) {<a name="line.108"></a>
+<span class="sourceLineNo">109</span>      LOG.debug("Ignoring exception: ", e);<a name="line.109"></a>
+<span class="sourceLineNo">110</span>      Thread.sleep(evictionDelay * 3);<a name="line.110"></a>
+<span class="sourceLineNo">111</span>    }<a name="line.111"></a>
+<span class="sourceLineNo">112</span>    List&lt;Procedure&lt;?&gt;&gt; procedureInfos =<a name="line.112"></a>
+<span class="sourceLineNo">113</span>        TEST_UTIL.getMiniHBaseCluster().getMaster().getMasterProcedureExecutor().getProcedures();<a name="line.113"></a>
+<span class="sourceLineNo">114</span>    for (Procedure procedureInfo : procedureInfos) {<a name="line.114"></a>
+<span class="sourceLineNo">115</span>      if (procedureInfo.getProcName().equals("CreateTableProcedure")<a name="line.115"></a>
+<span class="sourceLineNo">116</span>          &amp;&amp; procedureInfo.getState() == ProcedureProtos.ProcedureState.ROLLEDBACK) {<a name="line.116"></a>
+<span class="sourceLineNo">117</span>        fail("Found procedure " + procedureInfo + " that hasn't been cleaned up");<a name="line.117"></a>
+<span class="sourceLineNo">118</span>      }<a name="line.118"></a>
+<span class="sourceLineNo">119</span>    }<a name="line.119"></a>
+<span class="sourceLineNo">120</span>  }<a name="line.120"></a>
+<span class="sourceLineNo">121</span><a name="line.121"></a>
+<span class="sourceLineNo">122</span>  public static class CreateFailObserver implements MasterCoprocessor, MasterObserver {<a name="line.122"></a>
+<span class="sourceLineNo">123</span><a name="line.123"></a>
+<span class="sourceLineNo">124</span>    @Override<a name="line.124"></a>
+<span class="sourceLineNo">125</span>    public void preCreateTable(ObserverContext&lt;MasterCoprocessorEnvironment&gt; env,<a name="line.125"></a>
+<span class="sourceLineNo">126</span>        TableDescriptor desc, RegionInfo[] regions) throws IOException {<a name="line.126"></a>
+<span class="sourceLineNo">127</span><a name="line.127"></a>
+<span class="sourceLineNo">128</span>      if (desc.getTableName().equals(TABLE)) {<a name="line.128"></a>
+<span class="sourceLineNo">129</span>        throw new AccessDeniedException("Don't allow creation of table");<a name="line.129"></a>
+<span class="sourceLineNo">130</span>      }<a name="line.130"></a>
+<span class="sourceLineNo">131</span>    }<a name="line.131"></a>
+<span class="sourceLineNo">132</span><a name="line.132"></a>
+<span class="sourceLineNo">133</span>    @Override<a name="line.133"></a>
+<span class="sourceLineNo">134</span>    public Optional&lt;MasterObserver&gt; getMasterObserver() {<a name="line.134"></a>
+<span class="sourceLineNo">135</span>      return Optional.of(this);<a name="line.135"></a>
+<span class="sourceLineNo">136</span>    }<a name="line.136"></a>
+<span class="sourceLineNo">137</span>  }<a name="line.137"></a>
+<span class="sourceLineNo">138</span><a name="line.138"></a>
+<span class="sourceLineNo">139</span>  public static class CreateFailObserverHandler implements MasterCoprocessor, MasterObserver {<a name="line.139"></a>
+<span class="sourceLineNo">140</span><a name="line.140"></a>
+<span class="sourceLineNo">141</span>    @Override<a name="line.141"></a>
+<span class="sourceLineNo">142</span>    public void preCreateTableAction(<a name="line.142"></a>
+<span class="sourceLineNo">143</span>        final ObserverContext&lt;MasterCoprocessorEnvironment&gt; ctx, final TableDescriptor desc,<a name="line.143"></a>
+<span class="sourceLineNo">144</span>        final RegionInfo[] regions) throws IOException {<a name="line.144"></a>
+<span class="sourceLineNo">145</span><a name="line.145"></a>
+<span class="sourceLineNo">146</span>      if (desc.getTableName().equals(TABLE)) {<a name="line.146"></a>
+<span class="sourceLineNo">147</span>        throw new AccessDeniedException("Don't allow creation of table");<a name="line.147"></a>
+<span class="sourceLineNo">148</span>      }<a name="line.148"></a>
+<span class="sourceLineNo">149</span>    }<a name="line.149"></a>
+<span class="sourceLineNo">150</span><a name="line.150"></a>
+<span class="sourceLineNo">151</span>    @Override<a name="line.151"></a>
+<span class="sourceLineNo">152</span>    public Optional&lt;MasterObserver&gt; getMasterObserver() {<a name="line.152"></a>
+<span class="sourceLineNo">153</span>      return Optional.of(this);<a name="line.153"></a>
+<span class="sourceLineNo">154</span>    }<a name="line.154"></a>
+<span class="sourceLineNo">155</span>  }<a name="line.155"></a>
+<span class="sourceLineNo">156</span>}<a name="line.156"></a>
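
Both hunks above show the same failure-injection pattern: a MasterObserver
vetoes table creation by throwing AccessDeniedException from preCreateTable or
preCreateTableAction, the CreateTableProcedure rolls back, and after the
eviction TTL the test asserts that CompletedProcedureCleaner has removed it.
As a hedged sketch of the wiring side, assuming the statically imported
MASTER_COPROCESSOR_CONF_KEY lives on CoprocessorHost (the static import sits
above the visible hunk):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseTestingUtility;
    import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;

    public class ObserverWiringSketch {
      // Register the observer before the mini cluster starts so the active
      // master loads it during initialization.
      static void startClusterWithFailingObserver() throws Exception {
        HBaseTestingUtility util = new HBaseTestingUtility();
        Configuration conf = util.getConfiguration();
        conf.set(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY,
            TestFailedProcCleanup.CreateFailObserver.class.getName());
        util.startMiniCluster(3);
        // ... attempt createTable, expect AccessDeniedException, wait past
        // hbase.procedure.cleaner.evict.ttl, then scan getProcedures() ...
        util.shutdownMiniCluster();
      }
    }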
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html
index c06f057..265908c 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.html
@@ -31,136 +31,137 @@
 <span class="sourceLineNo">023</span>import java.io.IOException;<a name="line.23"></a>
 <span class="sourceLineNo">024</span>import java.util.List;<a name="line.24"></a>
 <span class="sourceLineNo">025</span>import java.util.Optional;<a name="line.25"></a>
-<span class="sourceLineNo">026</span>import org.apache.commons.logging.Log;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import org.apache.commons.logging.LogFactory;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import org.apache.hadoop.conf.Configuration;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.HBaseClassTestRule;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.HBaseTestingUtility;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.TableName;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.coprocessor.MasterObserver;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.coprocessor.ObserverContext;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.procedure2.Procedure;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.security.AccessDeniedException;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.testclassification.MediumTests;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.junit.After;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.junit.BeforeClass;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.junit.ClassRule;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.junit.Test;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.junit.experimental.categories.Category;<a name="line.46"></a>
-<span class="sourceLineNo">047</span><a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;<a name="line.48"></a>
-<span class="sourceLineNo">049</span><a name="line.49"></a>
-<span class="sourceLineNo">050</span>/**<a name="line.50"></a>
-<span class="sourceLineNo">051</span> * Check if CompletedProcedureCleaner cleans up failed nonce procedures.<a name="line.51"></a>
-<span class="sourceLineNo">052</span> */<a name="line.52"></a>
-<span class="sourceLineNo">053</span>@Category(MediumTests.class)<a name="line.53"></a>
-<span class="sourceLineNo">054</span>public class TestFailedProcCleanup {<a name="line.54"></a>
-<span class="sourceLineNo">055</span><a name="line.55"></a>
-<span class="sourceLineNo">056</span>  @ClassRule<a name="line.56"></a>
-<span class="sourceLineNo">057</span>  public static final HBaseClassTestRule CLASS_RULE =<a name="line.57"></a>
-<span class="sourceLineNo">058</span>      HBaseClassTestRule.forClass(TestFailedProcCleanup.class);<a name="line.58"></a>
-<span class="sourceLineNo">059</span><a name="line.59"></a>
-<span class="sourceLineNo">060</span>  private static final Log LOG = LogFactory.getLog(TestFailedProcCleanup.class);<a name="line.60"></a>
-<span class="sourceLineNo">061</span><a name="line.61"></a>
-<span class="sourceLineNo">062</span>  protected static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();<a name="line.62"></a>
-<span class="sourceLineNo">063</span>  private static Configuration conf;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>  private static final TableName TABLE = TableName.valueOf("test");<a name="line.64"></a>
-<span class="sourceLineNo">065</span>  private static final byte[] FAMILY = Bytes.toBytesBinary("f");<a name="line.65"></a>
-<span class="sourceLineNo">066</span>  private static final int evictionDelay = 10 * 1000;<a name="line.66"></a>
-<span class="sourceLineNo">067</span><a name="line.67"></a>
-<span class="sourceLineNo">068</span>  @BeforeClass<a name="line.68"></a>
-<span class="sourceLineNo">069</span>  public static void setUpBeforeClass() {<a name="line.69"></a>
-<span class="sourceLineNo">070</span>    conf = TEST_UTIL.getConfiguration();<a name="line.70"></a>
-<span class="sourceLineNo">071</span>    conf.setInt("hbase.procedure.cleaner.evict.ttl", evictionDelay);<a name="line.71"></a>
-<span class="sourceLineNo">072</span>    conf.setInt("hbase.procedure.cleaner.evict.batch.size", 1);<a name="line.72"></a>
-<span class="sourceLineNo">073</span>  }<a name="line.73"></a>
-<span class="sourceLineNo">074</span><a name="line.74"></a>
-<span class="sourceLineNo">075</span>  @After<a name="line.75"></a>
-<span class="sourceLineNo">076</span>  public void tearDown() throws Exception {<a name="line.76"></a>
-<span class="sourceLineNo">077</span>    TEST_UTIL.shutdownMiniCluster();<a name="line.77"></a>
-<span class="sourceLineNo">078</span>  }<a name="line.78"></a>
-<span class="sourceLineNo">079</span><a name="line.79"></a>
-<span class="sourceLineNo">080</span>  @Test<a name="line.80"></a>
-<span class="sourceLineNo">081</span>  public void testFailCreateTable() throws Exception {<a name="line.81"></a>
-<span class="sourceLineNo">082</span>    conf.set(MASTER_COPROCESSOR_CONF_KEY, CreateFailObserver.class.getName());<a name="line.82"></a>
-<span class="sourceLineNo">083</span>    TEST_UTIL.startMiniCluster(3);<a name="line.83"></a>
-<span class="sourceLineNo">084</span>    try {<a name="line.84"></a>
-<span class="sourceLineNo">085</span>      TEST_UTIL.createTable(TABLE, FAMILY);<a name="line.85"></a>
-<span class="sourceLineNo">086</span>    } catch (AccessDeniedException e) {<a name="line.86"></a>
-<span class="sourceLineNo">087</span>      LOG.debug("Ignoring exception: ", e);<a name="line.87"></a>
-<span class="sourceLineNo">088</span>      Thread.sleep(evictionDelay * 3);<a name="line.88"></a>
-<span class="sourceLineNo">089</span>    }<a name="line.89"></a>
-<span class="sourceLineNo">090</span>    List&lt;Procedure&lt;?&gt;&gt; procedureInfos =<a name="line.90"></a>
-<span class="sourceLineNo">091</span>        TEST_UTIL.getMiniHBaseCluster().getMaster().getMasterProcedureExecutor().getProcedures();<a name="line.91"></a>
-<span class="sourceLineNo">092</span>    for (Procedure procedureInfo : procedureInfos) {<a name="line.92"></a>
-<span class="sourceLineNo">093</span>      if (procedureInfo.getProcName().equals("CreateTableProcedure")<a name="line.93"></a>
-<span class="sourceLineNo">094</span>          &amp;&amp; procedureInfo.getState() == ProcedureProtos.ProcedureState.ROLLEDBACK) {<a name="line.94"></a>
-<span class="sourceLineNo">095</span>        fail("Found procedure " + procedureInfo + " that hasn't been cleaned up");<a name="line.95"></a>
-<span class="sourceLineNo">096</span>      }<a name="line.96"></a>
-<span class="sourceLineNo">097</span>    }<a name="line.97"></a>
-<span class="sourceLineNo">098</span>  }<a name="line.98"></a>
-<span class="sourceLineNo">099</span><a name="line.99"></a>
-<span class="sourceLineNo">100</span>  @Test<a name="line.100"></a>
-<span class="sourceLineNo">101</span>  public void testFailCreateTableAction() throws Exception {<a name="line.101"></a>
-<span class="sourceLineNo">102</span>    conf.set(MASTER_COPROCESSOR_CONF_KEY, CreateFailObserverHandler.class.getName());<a name="line.102"></a>
-<span class="sourceLineNo">103</span>    TEST_UTIL.startMiniCluster(3);<a name="line.103"></a>
-<span class="sourceLineNo">104</span>    try {<a name="line.104"></a>
-<span class="sourceLineNo">105</span>      TEST_UTIL.createTable(TABLE, FAMILY);<a name="line.105"></a>
-<span class="sourceLineNo">106</span>      fail("Table shouldn't be created");<a name="line.106"></a>
-<span class="sourceLineNo">107</span>    } catch (AccessDeniedException e) {<a name="line.107"></a>
-<span class="sourceLineNo">108</span>      LOG.debug("Ignoring exception: ", e);<a name="line.108"></a>
-<span class="sourceLineNo">109</span>      Thread.sleep(evictionDelay * 3);<a name="line.109"></a>
-<span class="sourceLineNo">110</span>    }<a name="line.110"></a>
-<span class="sourceLineNo">111</span>    List&lt;Procedure&lt;?&gt;&gt; procedureInfos =<a name="line.111"></a>
-<span class="sourceLineNo">112</span>        TEST_UTIL.getMiniHBaseCluster().getMaster().getMasterProcedureExecutor().getProcedures();<a name="line.112"></a>
-<span class="sourceLineNo">113</span>    for (Procedure procedureInfo : procedureInfos) {<a name="line.113"></a>
-<span class="sourceLineNo">114</span>      if (procedureInfo.getProcName().equals("CreateTableProcedure")<a name="line.114"></a>
-<span class="sourceLineNo">115</span>          &amp;&amp; procedureInfo.getState() == ProcedureProtos.ProcedureState.ROLLEDBACK) {<a name="line.115"></a>
-<span class="sourceLineNo">116</span>        fail("Found procedure " + procedureInfo + " that hasn't been cleaned up");<a name="line.116"></a>
-<span class="sourceLineNo">117</span>      }<a name="line.117"></a>
-<span class="sourceLineNo">118</span>    }<a name="line.118"></a>
-<span class="sourceLineNo">119</span>  }<a name="line.119"></a>
-<span class="sourceLineNo">120</span><a name="line.120"></a>
-<span class="sourceLineNo">121</span>  public static class CreateFailObserver implements MasterCoprocessor, MasterObserver {<a name="line.121"></a>
-<span class="sourceLineNo">122</span><a name="line.122"></a>
-<span class="sourceLineNo">123</span>    @Override<a name="line.123"></a>
-<span class="sourceLineNo">124</span>    public void preCreateTable(ObserverContext&lt;MasterCoprocessorEnvironment&gt; env,<a name="line.124"></a>
-<span class="sourceLineNo">125</span>        TableDescriptor desc, RegionInfo[] regions) throws IOException {<a name="line.125"></a>
-<span class="sourceLineNo">126</span><a name="line.126"></a>
-<span class="sourceLineNo">127</span>      if (desc.getTableName().equals(TABLE)) {<a name="line.127"></a>
-<span class="sourceLineNo">128</span>        throw new AccessDeniedException("Don't allow creation of table");<a name="line.128"></a>
-<span class="sourceLineNo">129</span>      }<a name="line.129"></a>
-<span class="sourceLineNo">130</span>    }<a name="line.130"></a>
-<span class="sourceLineNo">131</span><a name="line.131"></a>
-<span class="sourceLineNo">132</span>    @Override<a name="line.132"></a>
-<span class="sourceLineNo">133</span>    public Optional&lt;MasterObserver&gt; getMasterObserver() {<a name="line.133"></a>
-<span class="sourceLineNo">134</span>      return Optional.of(this);<a name="line.134"></a>
-<span class="sourceLineNo">135</span>    }<a name="line.135"></a>
-<span class="sourceLineNo">136</span>  }<a name="line.136"></a>
-<span class="sourceLineNo">137</span><a name="line.137"></a>
-<span class="sourceLineNo">138</span>  public static class CreateFailObserverHandler implements MasterCoprocessor, MasterObserver {<a name="line.138"></a>
-<span class="sourceLineNo">139</span><a name="line.139"></a>
-<span class="sourceLineNo">140</span>    @Override<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    public void preCreateTableAction(<a name="line.141"></a>
-<span class="sourceLineNo">142</span>        final ObserverContext&lt;MasterCoprocessorEnvironment&gt; ctx, final TableDescriptor desc,<a name="line.142"></a>
-<span class="sourceLineNo">143</span>        final RegionInfo[] regions) throws IOException {<a name="line.143"></a>
-<span class="sourceLineNo">144</span><a name="line.144"></a>
-<span class="sourceLineNo">145</span>      if (desc.getTableName().equals(TABLE)) {<a name="line.145"></a>
-<span class="sourceLineNo">146</span>        throw new AccessDeniedException("Don't allow creation of table");<a name="line.146"></a>
-<span class="sourceLineNo">147</span>      }<a name="line.147"></a>
-<span class="sourceLineNo">148</span>    }<a name="line.148"></a>
-<span class="sourceLineNo">149</span><a name="line.149"></a>
-<span class="sourceLineNo">150</span>    @Override<a name="line.150"></a>
-<span class="sourceLineNo">151</span>    public Optional&lt;MasterObserver&gt; getMasterObserver() {<a name="line.151"></a>
-<span class="sourceLineNo">152</span>      return Optional.of(this);<a name="line.152"></a>
-<span class="sourceLineNo">153</span>    }<a name="line.153"></a>
-<span class="sourceLineNo">154</span>  }<a name="line.154"></a>
-<span class="sourceLineNo">155</span>}<a name="line.155"></a>
+<span class="sourceLineNo">026</span>import org.apache.hadoop.conf.Configuration;<a name="line.26"></a>
+<span class="sourceLineNo">027</span>import org.apache.hadoop.hbase.HBaseClassTestRule;<a name="line.27"></a>
+<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.HBaseTestingUtility;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.TableName;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.coprocessor.MasterObserver;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.coprocessor.ObserverContext;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.procedure2.Procedure;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.security.AccessDeniedException;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.testclassification.MediumTests;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.junit.After;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.junit.BeforeClass;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.junit.ClassRule;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.junit.Test;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.junit.experimental.categories.Category;<a name="line.44"></a>
+<span class="sourceLineNo">045</span><a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.slf4j.Logger;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.slf4j.LoggerFactory;<a name="line.47"></a>
+<span class="sourceLineNo">048</span><a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;<a name="line.49"></a>
+<span class="sourceLineNo">050</span><a name="line.50"></a>
+<span class="sourceLineNo">051</span>/**<a name="line.51"></a>
+<span class="sourceLineNo">052</span> * Check if CompletedProcedureCleaner cleans up failed nonce procedures.<a name="line.52"></a>
+<span class="sourceLineNo">053</span> */<a name="line.53"></a>
+<span class="sourceLineNo">054</span>@Category(MediumTests.class)<a name="line.54"></a>
+<span class="sourceLineNo">055</span>public class TestFailedProcCleanup {<a name="line.55"></a>
+<span class="sourceLineNo">056</span><a name="line.56"></a>
+<span class="sourceLineNo">057</span>  @ClassRule<a name="line.57"></a>
+<span class="sourceLineNo">058</span>  public static final HBaseClassTestRule CLASS_RULE =<a name="line.58"></a>
+<span class="sourceLineNo">059</span>      HBaseClassTestRule.forClass(TestFailedProcCleanup.class);<a name="line.59"></a>
+<span class="sourceLineNo">060</span><a name="line.60"></a>
+<span class="sourceLineNo">061</span>  private static final Logger LOG = LoggerFactory.getLogger(TestFailedProcCleanup.class);<a name="line.61"></a>
+<span class="sourceLineNo">062</span><a name="line.62"></a>
+<span class="sourceLineNo">063</span>  protected static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();<a name="line.63"></a>
+<span class="sourceLineNo">064</span>  private static Configuration conf;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>  private static final TableName TABLE = TableName.valueOf("test");<a name="line.65"></a>
+<span class="sourceLineNo">066</span>  private static final byte[] FAMILY = Bytes.toBytesBinary("f");<a name="line.66"></a>
+<span class="sourceLineNo">067</span>  private static final int evictionDelay = 10 * 1000;<a name="line.67"></a>
+<span class="sourceLineNo">068</span><a name="line.68"></a>
+<span class="sourceLineNo">069</span>  @BeforeClass<a name="line.69"></a>
+<span class="sourceLineNo">070</span>  public static void setUpBeforeClass() {<a name="line.70"></a>
+<span class="sourceLineNo">071</span>    conf = TEST_UTIL.getConfiguration();<a name="line.71"></a>
+<span class="sourceLineNo">072</span>    conf.setInt("hbase.procedure.cleaner.evict.ttl", evictionDelay);<a name="line.72"></a>
+<span class="sourceLineNo">073</span>    conf.setInt("hbase.procedure.cleaner.evict.batch.size", 1);<a name="line.73"></a>
+<span class="sourceLineNo">074</span>  }<a name="line.74"></a>
+<span class="sourceLineNo">075</span><a name="line.75"></a>
+<span class="sourceLineNo">076</span>  @After<a name="line.76"></a>
+<span class="sourceLineNo">077</span>  public void tearDown() throws Exception {<a name="line.77"></a>
+<span class="sourceLineNo">078</span>    TEST_UTIL.shutdownMiniCluster();<a name="line.78"></a>
+<span class="sourceLineNo">079</span>  }<a name="line.79"></a>
+<span class="sourceLineNo">080</span><a name="line.80"></a>
+<span class="sourceLineNo">081</span>  @Test<a name="line.81"></a>
+<span class="sourceLineNo">082</span>  public void testFailCreateTable() throws Exception {<a name="line.82"></a>
+<span class="sourceLineNo">083</span>    conf.set(MASTER_COPROCESSOR_CONF_KEY, CreateFailObserver.class.getName());<a name="line.83"></a>
+<span class="sourceLineNo">084</span>    TEST_UTIL.startMiniCluster(3);<a name="line.84"></a>
+<span class="sourceLineNo">085</span>    try {<a name="line.85"></a>
+<span class="sourceLineNo">086</span>      TEST_UTIL.createTable(TABLE, FAMILY);<a name="line.86"></a>
+<span class="sourceLineNo">087</span>    } catch (AccessDeniedException e) {<a name="line.87"></a>
+<span class="sourceLineNo">088</span>      LOG.debug("Ignoring exception: ", e);<a name="line.88"></a>
+<span class="sourceLineNo">089</span>      Thread.sleep(evictionDelay * 3);<a name="line.89"></a>
+<span class="sourceLineNo">090</span>    }<a name="line.90"></a>
+<span class="sourceLineNo">091</span>    List&lt;Procedure&lt;?&gt;&gt; procedureInfos =<a name="line.91"></a>
+<span class="sourceLineNo">092</span>        TEST_UTIL.getMiniHBaseCluster().getMaster().getMasterProcedureExecutor().getProcedures();<a name="line.92"></a>
+<span class="sourceLineNo">093</span>    for (Procedure procedureInfo : procedureInfos) {<a name="line.93"></a>
+<span class="sourceLineNo">094</span>      if (procedureInfo.getProcName().equals("CreateTableProcedure")<a name="line.94"></a>
+<span class="sourceLineNo">095</span>          &amp;&amp; procedureInfo.getState() == ProcedureProtos.ProcedureState.ROLLEDBACK) {<a name="line.95"></a>
+<span class="sourceLineNo">096</span>        fail("Found procedure " + procedureInfo + " that hasn't been cleaned up");<a name="line.96"></a>
+<span class="sourceLineNo">097</span>      }<a name="line.97"></a>
+<span class="sourceLineNo">098</span>    }<a name="line.98"></a>
+<span class="sourceLineNo">099</span>  }<a name="line.99"></a>
+<span class="sourceLineNo">100</span><a name="line.100"></a>
+<span class="sourceLineNo">101</span>  @Test<a name="line.101"></a>
+<span class="sourceLineNo">102</span>  public void testFailCreateTableAction() throws Exception {<a name="line.102"></a>
+<span class="sourceLineNo">103</span>    conf.set(MASTER_COPROCESSOR_CONF_KEY, CreateFailObserverHandler.class.getName());<a name="line.103"></a>
+<span class="sourceLineNo">104</span>    TEST_UTIL.startMiniCluster(3);<a name="line.104"></a>
+<span class="sourceLineNo">105</span>    try {<a name="line.105"></a>
+<span class="sourceLineNo">106</span>      TEST_UTIL.createTable(TABLE, FAMILY);<a name="line.106"></a>
+<span class="sourceLineNo">107</span>      fail("Table shouldn't be created");<a name="line.107"></a>
+<span class="sourceLineNo">108</span>    } catch (AccessDeniedException e) {<a name="line.108"></a>
+<span class="sourceLineNo">109</span>      LOG.debug("Ignoring exception: ", e);<a name="line.109"></a>
+<span class="sourceLineNo">110</span>      Thread.sleep(evictionDelay * 3);<a name="line.110"></a>
+<span class="sourceLineNo">111</span>    }<a name="line.111"></a>
+<span class="sourceLineNo">112</span>    List&lt;Procedure&lt;?&gt;&gt; procedureInfos =<a name="line.112"></a>
+<span class="sourceLineNo">113</span>        TEST_UTIL.getMiniHBaseCluster().getMaster().getMasterProcedureExecutor().getProcedures();<a name="line.113"></a>
+<span class="sourceLineNo">114</span>    for (Procedure procedureInfo : procedureInfos) {<a name="line.114"></a>
+<span class="sourceLineNo">115</span>      if (procedureInfo.getProcName().equals("CreateTableProcedure")<a name="line.115"></a>
+<span class="sourceLineNo">116</span>          &amp;&amp; procedureInfo.getState() == ProcedureProtos.ProcedureState.ROLLEDBACK) {<a name="line.116"></a>
+<span class="sourceLineNo">117</span>        fail("Found procedure " + procedureInfo + " that hasn't been cleaned up");<a name="line.117"></a>
+<span class="sourceLineNo">118</span>      }<a name="line.118"></a>
+<span class="sourceLineNo">119</span>    }<a name="line.119"></a>
+<span class="sourceLineNo">120</span>  }<a name="line.120"></a>
+<span class="sourceLineNo">121</span><a name="line.121"></a>
+<span class="sourceLineNo">122</span>  public static class CreateFailObserver implements MasterCoprocessor, MasterObserver {<a name="line.122"></a>
+<span class="sourceLineNo">123</span><a name="line.123"></a>
+<span class="sourceLineNo">124</span>    @Override<a name="line.124"></a>
+<span class="sourceLineNo">125</span>    public void preCreateTable(ObserverContext&lt;MasterCoprocessorEnvironment&gt; env,<a name="line.125"></a>
+<span class="sourceLineNo">126</span>        TableDescriptor desc, RegionInfo[] regions) throws IOException {<a name="line.126"></a>
+<span class="sourceLineNo">127</span><a name="line.127"></a>
+<span class="sourceLineNo">128</span>      if (desc.getTableName().equals(TABLE)) {<a name="line.128"></a>
+<span class="sourceLineNo">129</span>        throw new AccessDeniedException("Don't allow creation of table");<a name="line.129"></a>
+<span class="sourceLineNo">130</span>      }<a name="line.130"></a>
+<span class="sourceLineNo">131</span>    }<a name="line.131"></a>
+<span class="sourceLineNo">132</span><a name="line.132"></a>
+<span class="sourceLineNo">133</span>    @Override<a name="line.133"></a>
+<span class="sourceLineNo">134</span>    public Optional&lt;MasterObserver&gt; getMasterObserver() {<a name="line.134"></a>
+<span class="sourceLineNo">135</span>      return Optional.of(this);<a name="line.135"></a>
+<span class="sourceLineNo">136</span>    }<a name="line.136"></a>
+<span class="sourceLineNo">137</span>  }<a name="line.137"></a>
+<span class="sourceLineNo">138</span><a name="line.138"></a>
+<span class="sourceLineNo">139</span>  public static class CreateFailObserverHandler implements MasterCoprocessor, MasterObserver {<a name="line.139"></a>
+<span class="sourceLineNo">140</span><a name="line.140"></a>
+<span class="sourceLineNo">141</span>    @Override<a name="line.141"></a>
+<span class="sourceLineNo">142</span>    public void preCreateTableAction(<a name="line.142"></a>
+<span class="sourceLineNo">143</span>        final ObserverContext&lt;MasterCoprocessorEnvironment&gt; ctx, final TableDescriptor desc,<a name="line.143"></a>
+<span class="sourceLineNo">144</span>        final RegionInfo[] regions) throws IOException {<a name="line.144"></a>
+<span class="sourceLineNo">145</span><a name="line.145"></a>
+<span class="sourceLineNo">146</span>      if (desc.getTableName().equals(TABLE)) {<a name="line.146"></a>
+<span class="sourceLineNo">147</span>        throw new AccessDeniedException("Don't allow creation of table");<a name="line.147"></a>
+<span class="sourceLineNo">148</span>      }<a name="line.148"></a>
+<span class="sourceLineNo">149</span>    }<a name="line.149"></a>
+<span class="sourceLineNo">150</span><a name="line.150"></a>
+<span class="sourceLineNo">151</span>    @Override<a name="line.151"></a>
+<span class="sourceLineNo">152</span>    public Optional&lt;MasterObserver&gt; getMasterObserver() {<a name="line.152"></a>
+<span class="sourceLineNo">153</span>      return Optional.of(this);<a name="line.153"></a>
+<span class="sourceLineNo">154</span>    }<a name="line.154"></a>
+<span class="sourceLineNo">155</span>  }<a name="line.155"></a>
+<span class="sourceLineNo">156</span>}<a name="line.156"></a>
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHdfsSnapshotHRegion.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHdfsSnapshotHRegion.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHdfsSnapshotHRegion.html
index 686cd64..41bc674 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHdfsSnapshotHRegion.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHdfsSnapshotHRegion.html
@@ -26,7 +26,7 @@
 <span class="sourceLineNo">018</span>package org.apache.hadoop.hbase.regionserver;<a name="line.18"></a>
 <span class="sourceLineNo">019</span><a name="line.19"></a>
 <span class="sourceLineNo">020</span>import java.io.IOException;<a name="line.20"></a>
-<span class="sourceLineNo">021</span>import org.apache.commons.lang.StringUtils;<a name="line.21"></a>
+<span class="sourceLineNo">021</span>import org.apache.commons.lang3.StringUtils;<a name="line.21"></a>
 <span class="sourceLineNo">022</span>import org.apache.hadoop.conf.Configuration;<a name="line.22"></a>
 <span class="sourceLineNo">023</span>import org.apache.hadoop.fs.Path;<a name="line.23"></a>
 <span class="sourceLineNo">024</span>import org.apache.hadoop.hbase.HBaseClassTestRule;<a name="line.24"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/testdevapidocs/src-html/org/apache/hadoop/hbase/util/compaction/TestMajorCompactionRequest.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/util/compaction/TestMajorCompactionRequest.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/util/compaction/TestMajorCompactionRequest.html
index ce48096..8a82f12 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/util/compaction/TestMajorCompactionRequest.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/util/compaction/TestMajorCompactionRequest.html
@@ -30,7 +30,7 @@
 <span class="sourceLineNo">022</span>import java.util.Optional;<a name="line.22"></a>
 <span class="sourceLineNo">023</span>import java.util.Set;<a name="line.23"></a>
 <span class="sourceLineNo">024</span>import java.util.stream.Collectors;<a name="line.24"></a>
-<span class="sourceLineNo">025</span>import org.apache.commons.lang.RandomStringUtils;<a name="line.25"></a>
+<span class="sourceLineNo">025</span>import org.apache.commons.lang3.RandomStringUtils;<a name="line.25"></a>
 <span class="sourceLineNo">026</span>import org.apache.hadoop.conf.Configuration;<a name="line.26"></a>
 <span class="sourceLineNo">027</span>import org.apache.hadoop.fs.FileStatus;<a name="line.27"></a>
 <span class="sourceLineNo">028</span>import org.apache.hadoop.fs.FileSystem;<a name="line.28"></a>


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.SnapshotWithSize.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.SnapshotWithSize.html b/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.SnapshotWithSize.html
index e080cd6..73a1036 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.SnapshotWithSize.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.SnapshotWithSize.html
@@ -42,604 +42,605 @@
 <span class="sourceLineNo">034</span>import java.util.function.Predicate;<a name="line.34"></a>
 <span class="sourceLineNo">035</span>import java.util.stream.Collectors;<a name="line.35"></a>
 <span class="sourceLineNo">036</span><a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.commons.lang.builder.HashCodeBuilder;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.commons.logging.Log;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.commons.logging.LogFactory;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.hadoop.conf.Configuration;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.fs.FileStatus;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.fs.FileSystem;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.fs.Path;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.TableName;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.client.Connection;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.client.Get;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.client.Put;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.client.Result;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.client.Table;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.util.HFileArchiveUtil;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.util.StringUtils;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.55"></a>
-<span class="sourceLineNo">056</span><a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hbase.thirdparty.com.google.common.collect.HashMultimap;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;<a name="line.59"></a>
-<span class="sourceLineNo">060</span><a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile;<a name="line.65"></a>
-<span class="sourceLineNo">066</span><a name="line.66"></a>
-<span class="sourceLineNo">067</span>/**<a name="line.67"></a>
-<span class="sourceLineNo">068</span> * Tracks file archiving and updates the hbase quota table.<a name="line.68"></a>
-<span class="sourceLineNo">069</span> */<a name="line.69"></a>
-<span class="sourceLineNo">070</span>@InterfaceAudience.Private<a name="line.70"></a>
-<span class="sourceLineNo">071</span>public class FileArchiverNotifierImpl implements FileArchiverNotifier {<a name="line.71"></a>
-<span class="sourceLineNo">072</span>  private static final Log LOG = LogFactory.getLog(FileArchiverNotifierImpl.class);<a name="line.72"></a>
-<span class="sourceLineNo">073</span>  private final Connection conn;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>  private final Configuration conf;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>  private final FileSystem fs;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>  private final TableName tn;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>  private final ReadLock readLock;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>  private final WriteLock writeLock;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>  private volatile long lastFullCompute = Long.MIN_VALUE;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>  private List&lt;String&gt; currentSnapshots = Collections.emptyList();<a name="line.80"></a>
-<span class="sourceLineNo">081</span>  private static final Map&lt;String,Object&gt; NAMESPACE_LOCKS = new HashMap&lt;&gt;();<a name="line.81"></a>
-<span class="sourceLineNo">082</span><a name="line.82"></a>
-<span class="sourceLineNo">083</span>  /**<a name="line.83"></a>
-<span class="sourceLineNo">084</span>   * An Exception thrown when SnapshotSize updates to hbase:quota fail to be written.<a name="line.84"></a>
-<span class="sourceLineNo">085</span>   */<a name="line.85"></a>
-<span class="sourceLineNo">086</span>  @InterfaceAudience.Private<a name="line.86"></a>
-<span class="sourceLineNo">087</span>  public static class QuotaSnapshotSizeSerializationException extends IOException {<a name="line.87"></a>
-<span class="sourceLineNo">088</span>    private static final long serialVersionUID = 1L;<a name="line.88"></a>
-<span class="sourceLineNo">089</span><a name="line.89"></a>
-<span class="sourceLineNo">090</span>    public QuotaSnapshotSizeSerializationException(String msg) {<a name="line.90"></a>
-<span class="sourceLineNo">091</span>      super(msg);<a name="line.91"></a>
-<span class="sourceLineNo">092</span>    }<a name="line.92"></a>
-<span class="sourceLineNo">093</span>  }<a name="line.93"></a>
-<span class="sourceLineNo">094</span><a name="line.94"></a>
-<span class="sourceLineNo">095</span>  public FileArchiverNotifierImpl(<a name="line.95"></a>
-<span class="sourceLineNo">096</span>      Connection conn, Configuration conf, FileSystem fs, TableName tn) {<a name="line.96"></a>
-<span class="sourceLineNo">097</span>    this.conn = conn;<a name="line.97"></a>
-<span class="sourceLineNo">098</span>    this.conf = conf;<a name="line.98"></a>
-<span class="sourceLineNo">099</span>    this.fs = fs;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>    this.tn = tn;<a name="line.100"></a>
-<span class="sourceLineNo">101</span>    ReentrantReadWriteLock lock = new ReentrantReadWriteLock();<a name="line.101"></a>
-<span class="sourceLineNo">102</span>    readLock = lock.readLock();<a name="line.102"></a>
-<span class="sourceLineNo">103</span>    writeLock = lock.writeLock();<a name="line.103"></a>
-<span class="sourceLineNo">104</span>  }<a name="line.104"></a>
-<span class="sourceLineNo">105</span><a name="line.105"></a>
-<span class="sourceLineNo">106</span>  static synchronized Object getLockForNamespace(String namespace) {<a name="line.106"></a>
-<span class="sourceLineNo">107</span>    return NAMESPACE_LOCKS.computeIfAbsent(namespace, (ns) -&gt; new Object());<a name="line.107"></a>
-<span class="sourceLineNo">108</span>  }<a name="line.108"></a>
-<span class="sourceLineNo">109</span><a name="line.109"></a>
-<span class="sourceLineNo">110</span>  /**<a name="line.110"></a>
-<span class="sourceLineNo">111</span>   * Returns a strictly-increasing measure of time extracted by {@link System#nanoTime()}.<a name="line.111"></a>
-<span class="sourceLineNo">112</span>   */<a name="line.112"></a>
-<span class="sourceLineNo">113</span>  long getLastFullCompute() {<a name="line.113"></a>
-<span class="sourceLineNo">114</span>    return lastFullCompute;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>  }<a name="line.115"></a>
-<span class="sourceLineNo">116</span><a name="line.116"></a>
-<span class="sourceLineNo">117</span>  @Override<a name="line.117"></a>
-<span class="sourceLineNo">118</span>  public void addArchivedFiles(Set&lt;Entry&lt;String, Long&gt;&gt; fileSizes) throws IOException {<a name="line.118"></a>
-<span class="sourceLineNo">119</span>    long start = System.nanoTime();<a name="line.119"></a>
-<span class="sourceLineNo">120</span>    readLock.lock();<a name="line.120"></a>
-<span class="sourceLineNo">121</span>    try {<a name="line.121"></a>
-<span class="sourceLineNo">122</span>      // We want to catch the case where we got an archival request, but there was a full<a name="line.122"></a>
-<span class="sourceLineNo">123</span>      // re-computation in progress that was blocking us. Most likely, the full computation is going<a name="line.123"></a>
-<span class="sourceLineNo">124</span>      // to already include the changes we were going to make.<a name="line.124"></a>
-<span class="sourceLineNo">125</span>      //<a name="line.125"></a>
-<span class="sourceLineNo">126</span>      // Same as "start &lt; lastFullCompute" but avoiding numeric overflow per the<a name="line.126"></a>
-<span class="sourceLineNo">127</span>      // System.nanoTime() javadoc<a name="line.127"></a>
-<span class="sourceLineNo">128</span>      if (lastFullCompute != Long.MIN_VALUE &amp;&amp; start - lastFullCompute &lt; 0) {<a name="line.128"></a>
-<span class="sourceLineNo">129</span>        if (LOG.isTraceEnabled()) {<a name="line.129"></a>
-<span class="sourceLineNo">130</span>          LOG.trace("A full computation was performed after this request was received."<a name="line.130"></a>
-<span class="sourceLineNo">131</span>              + " Ignoring requested updates: " + fileSizes);<a name="line.131"></a>
-<span class="sourceLineNo">132</span>        }<a name="line.132"></a>
-<span class="sourceLineNo">133</span>        return;<a name="line.133"></a>
-<span class="sourceLineNo">134</span>      }<a name="line.134"></a>
-<span class="sourceLineNo">135</span><a name="line.135"></a>
-<span class="sourceLineNo">136</span>      if (LOG.isTraceEnabled()) {<a name="line.136"></a>
-<span class="sourceLineNo">137</span>        LOG.trace("currentSnapshots: " + currentSnapshots + " fileSize: "+ fileSizes);<a name="line.137"></a>
-<span class="sourceLineNo">138</span>      }<a name="line.138"></a>
-<span class="sourceLineNo">139</span><a name="line.139"></a>
-<span class="sourceLineNo">140</span>      // Write increment to quota table for the correct snapshot. Only do this if we have snapshots<a name="line.140"></a>
-<span class="sourceLineNo">141</span>      // and some files that were archived.<a name="line.141"></a>
-<span class="sourceLineNo">142</span>      if (!currentSnapshots.isEmpty() &amp;&amp; !fileSizes.isEmpty()) {<a name="line.142"></a>
-<span class="sourceLineNo">143</span>        // We get back the files which no snapshot referenced (the files which will be deleted soon)<a name="line.143"></a>
-<span class="sourceLineNo">144</span>        groupArchivedFiledBySnapshotAndRecordSize(currentSnapshots, fileSizes);<a name="line.144"></a>
-<span class="sourceLineNo">145</span>      }<a name="line.145"></a>
-<span class="sourceLineNo">146</span>    } finally {<a name="line.146"></a>
-<span class="sourceLineNo">147</span>      readLock.unlock();<a name="line.147"></a>
-<span class="sourceLineNo">148</span>    }<a name="line.148"></a>
-<span class="sourceLineNo">149</span>  }<a name="line.149"></a>
-<span class="sourceLineNo">150</span><a name="line.150"></a>
-<span class="sourceLineNo">151</span>  /**<a name="line.151"></a>
-<span class="sourceLineNo">152</span>   * For each file in the map, this updates the first snapshot (lexicographic snapshot name) that<a name="line.152"></a>
-<span class="sourceLineNo">153</span>   * references this file. The result of this computation is serialized to the quota table.<a name="line.153"></a>
-<span class="sourceLineNo">154</span>   *<a name="line.154"></a>
-<span class="sourceLineNo">155</span>   * @param snapshots A collection of HBase snapshots to group the files into<a name="line.155"></a>
-<span class="sourceLineNo">156</span>   * @param fileSizes A map of file names to their sizes<a name="line.156"></a>
-<span class="sourceLineNo">157</span>   */<a name="line.157"></a>
-<span class="sourceLineNo">158</span>  void groupArchivedFiledBySnapshotAndRecordSize(<a name="line.158"></a>
-<span class="sourceLineNo">159</span>      List&lt;String&gt; snapshots, Set&lt;Entry&lt;String, Long&gt;&gt; fileSizes) throws IOException {<a name="line.159"></a>
-<span class="sourceLineNo">160</span>    // Make a copy as we'll modify it.<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    final Map&lt;String,Long&gt; filesToUpdate = new HashMap&lt;&gt;(fileSizes.size());<a name="line.161"></a>
-<span class="sourceLineNo">162</span>    for (Entry&lt;String,Long&gt; entry : fileSizes) {<a name="line.162"></a>
-<span class="sourceLineNo">163</span>      filesToUpdate.put(entry.getKey(), entry.getValue());<a name="line.163"></a>
-<span class="sourceLineNo">164</span>    }<a name="line.164"></a>
-<span class="sourceLineNo">165</span>    // Track the change in size to each snapshot<a name="line.165"></a>
-<span class="sourceLineNo">166</span>    final Map&lt;String,Long&gt; snapshotSizeChanges = new HashMap&lt;&gt;();<a name="line.166"></a>
-<span class="sourceLineNo">167</span>    for (String snapshot : snapshots) {<a name="line.167"></a>
-<span class="sourceLineNo">168</span>      // For each file in `filesToUpdate`, check if `snapshot` refers to it.<a name="line.168"></a>
-<span class="sourceLineNo">169</span>      // If `snapshot` does, remove it from `filesToUpdate` and add it to `snapshotSizeChanges`.<a name="line.169"></a>
-<span class="sourceLineNo">170</span>      bucketFilesToSnapshot(snapshot, filesToUpdate, snapshotSizeChanges);<a name="line.170"></a>
-<span class="sourceLineNo">171</span>      if (filesToUpdate.isEmpty()) {<a name="line.171"></a>
-<span class="sourceLineNo">172</span>        // If we have no more files recently archived, we have nothing more to check<a name="line.172"></a>
-<span class="sourceLineNo">173</span>        break;<a name="line.173"></a>
-<span class="sourceLineNo">174</span>      }<a name="line.174"></a>
-<span class="sourceLineNo">175</span>    }<a name="line.175"></a>
-<span class="sourceLineNo">176</span>    // We have computed changes to the snapshot size, we need to record them.<a name="line.176"></a>
-<span class="sourceLineNo">177</span>    if (!snapshotSizeChanges.isEmpty()) {<a name="line.177"></a>
-<span class="sourceLineNo">178</span>      if (LOG.isTraceEnabled()) {<a name="line.178"></a>
-<span class="sourceLineNo">179</span>        LOG.trace("Writing snapshot size changes for: " + snapshotSizeChanges);<a name="line.179"></a>
-<span class="sourceLineNo">180</span>      }<a name="line.180"></a>
-<span class="sourceLineNo">181</span>      persistSnapshotSizeChanges(snapshotSizeChanges);<a name="line.181"></a>
-<span class="sourceLineNo">182</span>    }<a name="line.182"></a>
-<span class="sourceLineNo">183</span>  }<a name="line.183"></a>
-<span class="sourceLineNo">184</span><a name="line.184"></a>
-<span class="sourceLineNo">185</span>  /**<a name="line.185"></a>
-<span class="sourceLineNo">186</span>   * For the given snapshot, find all files which this {@code snapshotName} references. After a file<a name="line.186"></a>
-<span class="sourceLineNo">187</span>   * is found to be referenced by the snapshot, it is removed from {@code filesToUpdate} and<a name="line.187"></a>
-<span class="sourceLineNo">188</span>   * {@code snapshotSizeChanges} is updated in concert.<a name="line.188"></a>
-<span class="sourceLineNo">189</span>   *<a name="line.189"></a>
-<span class="sourceLineNo">190</span>   * @param snapshotName The snapshot to check<a name="line.190"></a>
-<span class="sourceLineNo">191</span>   * @param filesToUpdate A mapping of archived files to their size<a name="line.191"></a>
-<span class="sourceLineNo">192</span>   * @param snapshotSizeChanges A mapping of snapshots and their change in size<a name="line.192"></a>
-<span class="sourceLineNo">193</span>   */<a name="line.193"></a>
-<span class="sourceLineNo">194</span>  void bucketFilesToSnapshot(<a name="line.194"></a>
-<span class="sourceLineNo">195</span>      String snapshotName, Map&lt;String,Long&gt; filesToUpdate, Map&lt;String,Long&gt; snapshotSizeChanges)<a name="line.195"></a>
-<span class="sourceLineNo">196</span>          throws IOException {<a name="line.196"></a>
-<span class="sourceLineNo">197</span>    // A quick check to avoid doing work if the caller unnecessarily invoked this method.<a name="line.197"></a>
-<span class="sourceLineNo">198</span>    if (filesToUpdate.isEmpty()) {<a name="line.198"></a>
-<span class="sourceLineNo">199</span>      return;<a name="line.199"></a>
-<span class="sourceLineNo">200</span>    }<a name="line.200"></a>
-<span class="sourceLineNo">201</span><a name="line.201"></a>
-<span class="sourceLineNo">202</span>    Path snapshotDir = SnapshotDescriptionUtils.getCompletedSnapshotDir(<a name="line.202"></a>
-<span class="sourceLineNo">203</span>        snapshotName, FSUtils.getRootDir(conf));<a name="line.203"></a>
-<span class="sourceLineNo">204</span>    SnapshotDescription sd = SnapshotDescriptionUtils.readSnapshotInfo(fs, snapshotDir);<a name="line.204"></a>
-<span class="sourceLineNo">205</span>    SnapshotManifest manifest = SnapshotManifest.open(conf, fs, snapshotDir, sd);<a name="line.205"></a>
-<span class="sourceLineNo">206</span>    // For each region referenced by the snapshot<a name="line.206"></a>
-<span class="sourceLineNo">207</span>    for (SnapshotRegionManifest rm : manifest.getRegionManifests()) {<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      // For each column family in this region<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      for (FamilyFiles ff : rm.getFamilyFilesList()) {<a name="line.209"></a>
-<span class="sourceLineNo">210</span>        // And each store file in that family<a name="line.210"></a>
-<span class="sourceLineNo">211</span>        for (StoreFile sf : ff.getStoreFilesList()) {<a name="line.211"></a>
-<span class="sourceLineNo">212</span>          Long valueOrNull = filesToUpdate.remove(sf.getName());<a name="line.212"></a>
-<span class="sourceLineNo">213</span>          if (valueOrNull != null) {<a name="line.213"></a>
-<span class="sourceLineNo">214</span>            // This storefile was recently archived, we should update this snapshot with its size<a name="line.214"></a>
-<span class="sourceLineNo">215</span>            snapshotSizeChanges.merge(snapshotName, valueOrNull, Long::sum);<a name="line.215"></a>
-<span class="sourceLineNo">216</span>          }<a name="line.216"></a>
-<span class="sourceLineNo">217</span>          // Short-circuit, if we have no more files that were archived, we don't need to iterate<a name="line.217"></a>
-<span class="sourceLineNo">218</span>          // over the rest of the snapshot.<a name="line.218"></a>
-<span class="sourceLineNo">219</span>          if (filesToUpdate.isEmpty()) {<a name="line.219"></a>
-<span class="sourceLineNo">220</span>            return;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>          }<a name="line.221"></a>
-<span class="sourceLineNo">222</span>        }<a name="line.222"></a>
-<span class="sourceLineNo">223</span>      }<a name="line.223"></a>
-<span class="sourceLineNo">224</span>    }<a name="line.224"></a>
-<span class="sourceLineNo">225</span>  }<a name="line.225"></a>
-<span class="sourceLineNo">226</span><a name="line.226"></a>
-<span class="sourceLineNo">227</span>  /**<a name="line.227"></a>
-<span class="sourceLineNo">228</span>   * Reads the current size for each snapshot to update, generates a new update based on that value,<a name="line.228"></a>
-<span class="sourceLineNo">229</span>   * and then writes the new update.<a name="line.229"></a>
-<span class="sourceLineNo">230</span>   *<a name="line.230"></a>
-<span class="sourceLineNo">231</span>   * @param snapshotSizeChanges A map of snapshot name to size change<a name="line.231"></a>
-<span class="sourceLineNo">232</span>   */<a name="line.232"></a>
-<span class="sourceLineNo">233</span>  void persistSnapshotSizeChanges(Map&lt;String,Long&gt; snapshotSizeChanges) throws IOException {<a name="line.233"></a>
-<span class="sourceLineNo">234</span>    try (Table quotaTable = conn.getTable(QuotaTableUtil.QUOTA_TABLE_NAME)) {<a name="line.234"></a>
-<span class="sourceLineNo">235</span>      // Create a list (with a more typical ordering implied)<a name="line.235"></a>
-<span class="sourceLineNo">236</span>      final List&lt;Entry&lt;String,Long&gt;&gt; snapshotSizeEntries = new ArrayList&lt;&gt;(<a name="line.236"></a>
-<span class="sourceLineNo">237</span>          snapshotSizeChanges.entrySet());<a name="line.237"></a>
-<span class="sourceLineNo">238</span>      // Create the Gets for each snapshot we need to update<a name="line.238"></a>
-<span class="sourceLineNo">239</span>      final List&lt;Get&gt; snapshotSizeGets = snapshotSizeEntries.stream()<a name="line.239"></a>
-<span class="sourceLineNo">240</span>          .map((e) -&gt; QuotaTableUtil.makeGetForSnapshotSize(tn, e.getKey()))<a name="line.240"></a>
-<span class="sourceLineNo">241</span>          .collect(Collectors.toList());<a name="line.241"></a>
-<span class="sourceLineNo">242</span>      final Iterator&lt;Entry&lt;String,Long&gt;&gt; iterator = snapshotSizeEntries.iterator();<a name="line.242"></a>
-<span class="sourceLineNo">243</span>      // A List to store each Put we'll create from the Get's we retrieve<a name="line.243"></a>
-<span class="sourceLineNo">244</span>      final List&lt;Put&gt; updates = new ArrayList&lt;&gt;(snapshotSizeEntries.size());<a name="line.244"></a>
-<span class="sourceLineNo">245</span><a name="line.245"></a>
-<span class="sourceLineNo">246</span>      // TODO Push this down to the RegionServer with a coprocessor:<a name="line.246"></a>
-<span class="sourceLineNo">247</span>      //<a name="line.247"></a>
-<span class="sourceLineNo">248</span>      // We would really like to piggy-back on the row-lock already being grabbed<a name="line.248"></a>
-<span class="sourceLineNo">249</span>      // to handle the update of the row in the quota table. However, because the value<a name="line.249"></a>
-<span class="sourceLineNo">250</span>      // is a serialized protobuf, the standard Increment API doesn't work for us. With a CP, we<a name="line.250"></a>
-<span class="sourceLineNo">251</span>      // can just send the size deltas to the RS and atomically update the serialized PB object<a name="line.251"></a>
-<span class="sourceLineNo">252</span>      // while relying on the row-lock for synchronization.<a name="line.252"></a>
-<span class="sourceLineNo">253</span>      //<a name="line.253"></a>
-<span class="sourceLineNo">254</span>      // Synchronizing on the namespace string is a "minor smell" but passable as this is<a name="line.254"></a>
-<span class="sourceLineNo">255</span>      // only invoked via a single caller (the active Master). Using the namespace name lets us<a name="line.255"></a>
-<span class="sourceLineNo">256</span>      // have some parallelism without worry of on caller seeing stale data from the quota table.<a name="line.256"></a>
-<span class="sourceLineNo">257</span>      synchronized (getLockForNamespace(tn.getNamespaceAsString())) {<a name="line.257"></a>
-<span class="sourceLineNo">258</span>        final Result[] existingSnapshotSizes = quotaTable.get(snapshotSizeGets);<a name="line.258"></a>
-<span class="sourceLineNo">259</span>        long totalSizeChange = 0;<a name="line.259"></a>
-<span class="sourceLineNo">260</span>        // Read the current size values (if they exist) to generate the new value<a name="line.260"></a>
-<span class="sourceLineNo">261</span>        for (Result result : existingSnapshotSizes) {<a name="line.261"></a>
-<span class="sourceLineNo">262</span>          Entry&lt;String,Long&gt; entry = iterator.next();<a name="line.262"></a>
-<span class="sourceLineNo">263</span>          String snapshot = entry.getKey();<a name="line.263"></a>
-<span class="sourceLineNo">264</span>          Long size = entry.getValue();<a name="line.264"></a>
-<span class="sourceLineNo">265</span>          // Track the total size change for the namespace this table belongs in<a name="line.265"></a>
-<span class="sourceLineNo">266</span>          totalSizeChange += size;<a name="line.266"></a>
-<span class="sourceLineNo">267</span>          // Get the size of the previous value (or zero)<a name="line.267"></a>
-<span class="sourceLineNo">268</span>          long previousSize = getSnapshotSizeFromResult(result);<a name="line.268"></a>
-<span class="sourceLineNo">269</span>          // Create an update. A file was archived from the table, so the table's size goes<a name="line.269"></a>
-<span class="sourceLineNo">270</span>          // down, but the snapshot's size goes up.<a name="line.270"></a>
-<span class="sourceLineNo">271</span>          updates.add(QuotaTableUtil.createPutForSnapshotSize(tn, snapshot, previousSize + size));<a name="line.271"></a>
-<span class="sourceLineNo">272</span>        }<a name="line.272"></a>
-<span class="sourceLineNo">273</span><a name="line.273"></a>
-<span class="sourceLineNo">274</span>        // Create an update for the summation of all snapshots in the namespace<a name="line.274"></a>
-<span class="sourceLineNo">275</span>        if (totalSizeChange != 0) {<a name="line.275"></a>
-<span class="sourceLineNo">276</span>          long previousSize = getPreviousNamespaceSnapshotSize(<a name="line.276"></a>
-<span class="sourceLineNo">277</span>              quotaTable, tn.getNamespaceAsString());<a name="line.277"></a>
-<span class="sourceLineNo">278</span>          updates.add(QuotaTableUtil.createPutForNamespaceSnapshotSize(<a name="line.278"></a>
-<span class="sourceLineNo">279</span>              tn.getNamespaceAsString(), previousSize + totalSizeChange));<a name="line.279"></a>
-<span class="sourceLineNo">280</span>        }<a name="line.280"></a>
-<span class="sourceLineNo">281</span><a name="line.281"></a>
-<span class="sourceLineNo">282</span>        // Send all of the quota table updates in one batch.<a name="line.282"></a>
-<span class="sourceLineNo">283</span>        List&lt;Object&gt; failures = new ArrayList&lt;&gt;();<a name="line.283"></a>
-<span class="sourceLineNo">284</span>        final Object[] results = new Object[updates.size()];<a name="line.284"></a>
-<span class="sourceLineNo">285</span>        quotaTable.batch(updates, results);<a name="line.285"></a>
-<span class="sourceLineNo">286</span>        for (Object result : results) {<a name="line.286"></a>
-<span class="sourceLineNo">287</span>          // A null result is an error condition (all RPC attempts failed)<a name="line.287"></a>
-<span class="sourceLineNo">288</span>          if (!(result instanceof Result)) {<a name="line.288"></a>
-<span class="sourceLineNo">289</span>            failures.add(result);<a name="line.289"></a>
-<span class="sourceLineNo">290</span>          }<a name="line.290"></a>
-<span class="sourceLineNo">291</span>        }<a name="line.291"></a>
-<span class="sourceLineNo">292</span>        // Propagate a failure if any updates failed<a name="line.292"></a>
-<span class="sourceLineNo">293</span>        if (!failures.isEmpty()) {<a name="line.293"></a>
-<span class="sourceLineNo">294</span>          throw new QuotaSnapshotSizeSerializationException(<a name="line.294"></a>
-<span class="sourceLineNo">295</span>              "Failed to write some snapshot size updates: " + failures);<a name="line.295"></a>
-<span class="sourceLineNo">296</span>        }<a name="line.296"></a>
-<span class="sourceLineNo">297</span>      }<a name="line.297"></a>
-<span class="sourceLineNo">298</span>    } catch (InterruptedException e) {<a name="line.298"></a>
-<span class="sourceLineNo">299</span>      Thread.currentThread().interrupt();<a name="line.299"></a>
-<span class="sourceLineNo">300</span>      return;<a name="line.300"></a>
-<span class="sourceLineNo">301</span>    }<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  }<a name="line.302"></a>
-<span class="sourceLineNo">303</span><a name="line.303"></a>
-<span class="sourceLineNo">304</span>  /**<a name="line.304"></a>
-<span class="sourceLineNo">305</span>   * Fetches the current size of all snapshots in the given {@code namespace}.<a name="line.305"></a>
-<span class="sourceLineNo">306</span>   *<a name="line.306"></a>
-<span class="sourceLineNo">307</span>   * @param quotaTable The HBase quota table<a name="line.307"></a>
-<span class="sourceLineNo">308</span>   * @param namespace Namespace to fetch the sum of snapshot sizes for<a name="line.308"></a>
-<span class="sourceLineNo">309</span>   * @return The size of all snapshot sizes for the namespace in bytes.<a name="line.309"></a>
-<span class="sourceLineNo">310</span>   */<a name="line.310"></a>
-<span class="sourceLineNo">311</span>  long getPreviousNamespaceSnapshotSize(Table quotaTable, String namespace) throws IOException {<a name="line.311"></a>
-<span class="sourceLineNo">312</span>    // Update the size of each snapshot for all snapshots in a namespace.<a name="line.312"></a>
-<span class="sourceLineNo">313</span>    Result r = quotaTable.get(<a name="line.313"></a>
-<span class="sourceLineNo">314</span>        QuotaTableUtil.createGetNamespaceSnapshotSize(namespace));<a name="line.314"></a>
-<span class="sourceLineNo">315</span>    return getSnapshotSizeFromResult(r);<a name="line.315"></a>
-<span class="sourceLineNo">316</span>  }<a name="line.316"></a>
-<span class="sourceLineNo">317</span><a name="line.317"></a>
-<span class="sourceLineNo">318</span>  /**<a name="line.318"></a>
-<span class="sourceLineNo">319</span>   * Extracts the size component from a serialized {@link SpaceQuotaSnapshot} protobuf.<a name="line.319"></a>
-<span class="sourceLineNo">320</span>   *<a name="line.320"></a>
-<span class="sourceLineNo">321</span>   * @param r A Result containing one cell with a SpaceQuotaSnapshot protobuf<a name="line.321"></a>
-<span class="sourceLineNo">322</span>   * @return The size in bytes of the snapshot.<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   */<a name="line.323"></a>
-<span class="sourceLineNo">324</span>  long getSnapshotSizeFromResult(Result r) throws InvalidProtocolBufferException {<a name="line.324"></a>
-<span class="sourceLineNo">325</span>    // Per javadoc, Result should only be null if an exception was thrown. So, if we're here,<a name="line.325"></a>
-<span class="sourceLineNo">326</span>    // we should be non-null. If we can't advance to the first cell, same as "no cell".<a name="line.326"></a>
-<span class="sourceLineNo">327</span>    if (!r.isEmpty() &amp;&amp; r.advance()) {<a name="line.327"></a>
-<span class="sourceLineNo">328</span>      return QuotaTableUtil.parseSnapshotSize(r.current());<a name="line.328"></a>
-<span class="sourceLineNo">329</span>    }<a name="line.329"></a>
-<span class="sourceLineNo">330</span>    return 0L;<a name="line.330"></a>
-<span class="sourceLineNo">331</span>  }<a name="line.331"></a>
-<span class="sourceLineNo">332</span><a name="line.332"></a>
-<span class="sourceLineNo">333</span>  @Override<a name="line.333"></a>
-<span class="sourceLineNo">334</span>  public long computeAndStoreSnapshotSizes(<a name="line.334"></a>
-<span class="sourceLineNo">335</span>      Collection&lt;String&gt; currentSnapshots) throws IOException {<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    // Record what the current snapshots are<a name="line.336"></a>
-<span class="sourceLineNo">337</span>    this.currentSnapshots = new ArrayList&lt;&gt;(currentSnapshots);<a name="line.337"></a>
-<span class="sourceLineNo">338</span>    Collections.sort(this.currentSnapshots);<a name="line.338"></a>
-<span class="sourceLineNo">339</span><a name="line.339"></a>
-<span class="sourceLineNo">340</span>    // compute new size for table + snapshots for that table<a name="line.340"></a>
-<span class="sourceLineNo">341</span>    List&lt;SnapshotWithSize&gt; snapshotSizes = computeSnapshotSizes(this.currentSnapshots);<a name="line.341"></a>
-<span class="sourceLineNo">342</span>    if (LOG.isTraceEnabled()) {<a name="line.342"></a>
-<span class="sourceLineNo">343</span>      LOG.trace("Computed snapshot sizes for " + tn + " of " + snapshotSizes);<a name="line.343"></a>
-<span class="sourceLineNo">344</span>    }<a name="line.344"></a>
-<span class="sourceLineNo">345</span><a name="line.345"></a>
-<span class="sourceLineNo">346</span>    // Compute the total size of all snapshots against our table<a name="line.346"></a>
-<span class="sourceLineNo">347</span>    final long totalSnapshotSize = snapshotSizes.stream().mapToLong((sws) -&gt; sws.getSize()).sum();<a name="line.347"></a>
-<span class="sourceLineNo">348</span><a name="line.348"></a>
-<span class="sourceLineNo">349</span>    writeLock.lock();<a name="line.349"></a>
-<span class="sourceLineNo">350</span>    try {<a name="line.350"></a>
-<span class="sourceLineNo">351</span>      // Persist the size of each snapshot<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      try (Table quotaTable = conn.getTable(QuotaTableUtil.QUOTA_TABLE_NAME)) {<a name="line.352"></a>
-<span class="sourceLineNo">353</span>        persistSnapshotSizes(quotaTable, snapshotSizes);<a name="line.353"></a>
-<span class="sourceLineNo">354</span>      }<a name="line.354"></a>
-<span class="sourceLineNo">355</span><a name="line.355"></a>
-<span class="sourceLineNo">356</span>      // Report the last time we did a recomputation<a name="line.356"></a>
-<span class="sourceLineNo">357</span>      lastFullCompute = System.nanoTime();<a name="line.357"></a>
-<span class="sourceLineNo">358</span><a name="line.358"></a>
-<span class="sourceLineNo">359</span>      return totalSnapshotSize;<a name="line.359"></a>
-<span class="sourceLineNo">360</span>    } finally {<a name="line.360"></a>
-<span class="sourceLineNo">361</span>      writeLock.unlock();<a name="line.361"></a>
-<span class="sourceLineNo">362</span>    }<a name="line.362"></a>
-<span class="sourceLineNo">363</span>  }<a name="line.363"></a>
-<span class="sourceLineNo">364</span><a name="line.364"></a>
-<span class="sourceLineNo">365</span>  @Override<a name="line.365"></a>
-<span class="sourceLineNo">366</span>  public String toString() {<a name="line.366"></a>
-<span class="sourceLineNo">367</span>    StringBuilder sb = new StringBuilder();<a name="line.367"></a>
-<span class="sourceLineNo">368</span>    sb.append(getClass().getSimpleName()).append("[");<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    sb.append("tableName=").append(tn).append(", currentSnapshots=");<a name="line.369"></a>
-<span class="sourceLineNo">370</span>    sb.append(currentSnapshots).append(", lastFullCompute=").append(lastFullCompute);<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    return sb.append("]").toString();<a name="line.371"></a>
-<span class="sourceLineNo">372</span>  }<a name="line.372"></a>
-<span class="sourceLineNo">373</span><a name="line.373"></a>
-<span class="sourceLineNo">374</span>  /**<a name="line.374"></a>
-<span class="sourceLineNo">375</span>   * Computes the size of each snapshot against the table referenced by {@code this}.<a name="line.375"></a>
-<span class="sourceLineNo">376</span>   *<a name="line.376"></a>
-<span class="sourceLineNo">377</span>   * @param snapshots A sorted list of snapshots against {@code tn}.<a name="line.377"></a>
-<span class="sourceLineNo">378</span>   * @return A list of the size for each snapshot against {@code tn}.<a name="line.378"></a>
-<span class="sourceLineNo">379</span>   */<a name="line.379"></a>
-<span class="sourceLineNo">380</span>  List&lt;SnapshotWithSize&gt; computeSnapshotSizes(List&lt;String&gt; snapshots) throws IOException {<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    final List&lt;SnapshotWithSize&gt; snapshotSizes = new ArrayList&lt;&gt;(snapshots.size());<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    final Path rootDir = FSUtils.getRootDir(conf);<a name="line.382"></a>
-<span class="sourceLineNo">383</span><a name="line.383"></a>
-<span class="sourceLineNo">384</span>    // Get the map of store file names to store file path for this table<a name="line.384"></a>
-<span class="sourceLineNo">385</span>    final Set&lt;String&gt; tableReferencedStoreFiles;<a name="line.385"></a>
-<span class="sourceLineNo">386</span>    try {<a name="line.386"></a>
-<span class="sourceLineNo">387</span>      tableReferencedStoreFiles = FSUtils.getTableStoreFilePathMap(fs, rootDir).keySet();<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    } catch (InterruptedException e) {<a name="line.388"></a>
-<span class="sourceLineNo">389</span>      Thread.currentThread().interrupt();<a name="line.389"></a>
-<span class="sourceLineNo">390</span>      return null;<a name="line.390"></a>
-<span class="sourceLineNo">391</span>    }<a name="line.391"></a>
-<span class="sourceLineNo">392</span><a name="line.392"></a>
-<span class="sourceLineNo">393</span>    if (LOG.isTraceEnabled()) {<a name="line.393"></a>
-<span class="sourceLineNo">394</span>      LOG.trace("Paths for " + tn + ": " + tableReferencedStoreFiles);<a name="line.394"></a>
-<span class="sourceLineNo">395</span>    }<a name="line.395"></a>
-<span class="sourceLineNo">396</span><a name="line.396"></a>
-<span class="sourceLineNo">397</span>    // For each snapshot on this table, get the files which the snapshot references which<a name="line.397"></a>
-<span class="sourceLineNo">398</span>    // the table does not.<a name="line.398"></a>
-<span class="sourceLineNo">399</span>    Set&lt;String&gt; snapshotReferencedFiles = new HashSet&lt;&gt;();<a name="line.399"></a>
-<span class="sourceLineNo">400</span>    for (String snapshotName : snapshots) {<a name="line.400"></a>
-<span class="sourceLineNo">401</span>      Path snapshotDir = SnapshotDescriptionUtils.getCompletedSnapshotDir(snapshotName, rootDir);<a name="line.401"></a>
-<span class="sourceLineNo">402</span>      SnapshotDescription sd = SnapshotDescriptionUtils.readSnapshotInfo(fs, snapshotDir);<a name="line.402"></a>
-<span class="sourceLineNo">403</span>      SnapshotManifest manifest = SnapshotManifest.open(conf, fs, snapshotDir, sd);<a name="line.403"></a>
-<span class="sourceLineNo">404</span><a name="line.404"></a>
-<span class="sourceLineNo">405</span>      if (LOG.isTraceEnabled()) {<a name="line.405"></a>
-<span class="sourceLineNo">406</span>        LOG.trace("Files referenced by other snapshots: " + snapshotReferencedFiles);<a name="line.406"></a>
-<span class="sourceLineNo">407</span>      }<a name="line.407"></a>
-<span class="sourceLineNo">408</span><a name="line.408"></a>
-<span class="sourceLineNo">409</span>      // Get the set of files from the manifest that this snapshot references which are not also<a name="line.409"></a>
-<span class="sourceLineNo">410</span>      // referenced by the originating table.<a name="line.410"></a>
-<span class="sourceLineNo">411</span>      Set&lt;StoreFileReference&gt; unreferencedStoreFileNames = getStoreFilesFromSnapshot(<a name="line.411"></a>
-<span class="sourceLineNo">412</span>          manifest, (sfn) -&gt; !tableReferencedStoreFiles.contains(sfn)<a name="line.412"></a>
-<span class="sourceLineNo">413</span>              &amp;&amp; !snapshotReferencedFiles.contains(sfn));<a name="line.413"></a>
-<span class="sourceLineNo">414</span><a name="line.414"></a>
-<span class="sourceLineNo">415</span>      if (LOG.isTraceEnabled()) {<a name="line.415"></a>
-<span class="sourceLineNo">416</span>        LOG.trace("Snapshot " + snapshotName + " solely references the files: "<a name="line.416"></a>
-<span class="sourceLineNo">417</span>            + unreferencedStoreFileNames);<a name="line.417"></a>
-<span class="sourceLineNo">418</span>      }<a name="line.418"></a>
-<span class="sourceLineNo">419</span><a name="line.419"></a>
-<span class="sourceLineNo">420</span>      // Compute the size of the store files for this snapshot<a name="line.420"></a>
-<span class="sourceLineNo">421</span>      long size = getSizeOfStoreFiles(tn, unreferencedStoreFileNames);<a name="line.421"></a>
-<span class="sourceLineNo">422</span>      if (LOG.isTraceEnabled()) {<a name="line.422"></a>
-<span class="sourceLineNo">423</span>        LOG.trace("Computed size of " + snapshotName + " to be " + size);<a name="line.423"></a>
-<span class="sourceLineNo">424</span>      }<a name="line.424"></a>
-<span class="sourceLineNo">425</span><a name="line.425"></a>
-<span class="sourceLineNo">426</span>      // Persist this snapshot's size into the map<a name="line.426"></a>
-<span class="sourceLineNo">427</span>      snapshotSizes.add(new SnapshotWithSize(snapshotName, size));<a name="line.427"></a>
-<span class="sourceLineNo">428</span><a name="line.428"></a>
-<span class="sourceLineNo">429</span>      // Make sure that we don't double-count the same file<a name="line.429"></a>
-<span class="sourceLineNo">430</span>      for (StoreFileReference ref : unreferencedStoreFileNames) {<a name="line.430"></a>
-<span class="sourceLineNo">431</span>        for (String fileNames : ref.getFamilyToFilesMapping().values()) {<a name="line.431"></a>
-<span class="sourceLineNo">432</span>          snapshotReferencedFiles.add(fileNames);<a name="line.432"></a>
-<span class="sourceLineNo">433</span>        }<a name="line.433"></a>
-<span class="sourceLineNo">434</span>      }<a name="line.434"></a>
-<span class="sourceLineNo">435</span>    }<a name="line.435"></a>
-<span class="sourceLineNo">436</span><a name="line.436"></a>
-<span class="sourceLineNo">437</span>    return snapshotSizes;<a name="line.437"></a>
-<span class="sourceLineNo">438</span>  }<a name="line.438"></a>
-<span class="sourceLineNo">439</span><a name="line.439"></a>
-<span class="sourceLineNo">440</span>  /**<a name="line.440"></a>
-<span class="sourceLineNo">441</span>   * Computes the size of each store file in {@code storeFileNames}<a name="line.441"></a>
-<span class="sourceLineNo">442</span>   */<a name="line.442"></a>
-<span class="sourceLineNo">443</span>  long getSizeOfStoreFiles(TableName tn, Set&lt;StoreFileReference&gt; storeFileNames) {<a name="line.443"></a>
-<span class="sourceLineNo">444</span>    return storeFileNames.stream()<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        .collect(Collectors.summingLong((sfr) -&gt; getSizeOfStoreFile(tn, sfr)));<a name="line.445"></a>
-<span class="sourceLineNo">446</span>  }<a name="line.446"></a>
-<span class="sourceLineNo">447</span><a name="line.447"></a>
-<span class="sourceLineNo">448</span>  /**<a name="line.448"></a>
-<span class="sourceLineNo">449</span>   * Computes the size of the store files for a single region.<a name="line.449"></a>
-<span class="sourceLineNo">450</span>   */<a name="line.450"></a>
-<span class="sourceLineNo">451</span>  long getSizeOfStoreFile(TableName tn, StoreFileReference storeFileName) {<a name="line.451"></a>
-<span class="sourceLineNo">452</span>    String regionName = storeFileName.getRegionName();<a name="line.452"></a>
-<span class="sourceLineNo">453</span>    return storeFileName.getFamilyToFilesMapping()<a name="line.453"></a>
-<span class="sourceLineNo">454</span>        .entries().stream()<a name="line.454"></a>
-<span class="sourceLineNo">455</span>        .collect(Collectors.summingLong((e) -&gt;<a name="line.455"></a>
-<span class="sourceLineNo">456</span>            getSizeOfStoreFile(tn, regionName, e.getKey(), e.getValue())));<a name="line.456"></a>
-<span class="sourceLineNo">457</span>  }<a name="line.457"></a>
-<span class="sourceLineNo">458</span><a name="line.458"></a>
-<span class="sourceLineNo">459</span>  /**<a name="line.459"></a>
-<span class="sourceLineNo">460</span>   * Computes the size of the store file given its name, region and family name in<a name="line.460"></a>
-<span class="sourceLineNo">461</span>   * the archive directory.<a name="line.461"></a>
-<span class="sourceLineNo">462</span>   */<a name="line.462"></a>
-<span class="sourceLineNo">463</span>  long getSizeOfStoreFile(<a name="line.463"></a>
-<span class="sourceLineNo">464</span>      TableName tn, String regionName, String family, String storeFile) {<a name="line.464"></a>
-<span class="sourceLineNo">465</span>    Path familyArchivePath;<a name="line.465"></a>
-<span class="sourceLineNo">466</span>    try {<a name="line.466"></a>
-<span class="sourceLineNo">467</span>      familyArchivePath = HFileArchiveUtil.getStoreArchivePath(conf, tn, regionName, family);<a name="line.467"></a>
-<span class="sourceLineNo">468</span>    } catch (IOException e) {<a name="line.468"></a>
-<span class="sourceLineNo">469</span>      LOG.warn("Could not compute path for the archive directory for the region", e);<a name="line.469"></a>
-<span class="sourceLineNo">470</span>      return 0L;<a name="line.470"></a>
-<span class="sourceLineNo">471</span>    }<a name="line.471"></a>
-<span class="sourceLineNo">472</span>    Path fileArchivePath = new Path(familyArchivePath, storeFile);<a name="line.472"></a>
-<span class="sourceLineNo">473</span>    try {<a name="line.473"></a>
-<span class="sourceLineNo">474</span>      if (fs.exists(fileArchivePath)) {<a name="line.474"></a>
-<span class="sourceLineNo">475</span>        FileStatus[] status = fs.listStatus(fileArchivePath);<a name="line.475"></a>
-<span class="sourceLineNo">476</span>        if (1 != status.length) {<a name="line.476"></a>
-<span class="sourceLineNo">477</span>          LOG.warn("Expected " + fileArchivePath +<a name="line.477"></a>
-<span class="sourceLineNo">478</span>              " to be a file but was a directory, ignoring reference");<a name="line.478"></a>
-<span class="sourceLineNo">479</span>          return 0L;<a name="line.479"></a>
-<span class="sourceLineNo">480</span>        }<a name="line.480"></a>
-<span class="sourceLineNo">481</span>        return status[0].getLen();<a name="line.481"></a>
-<span class="sourceLineNo">482</span>      }<a name="line.482"></a>
-<span class="sourceLineNo">483</span>    } catch (IOException e) {<a name="line.483"></a>
-<span class="sourceLineNo">484</span>      LOG.warn("Could not obtain the status of " + fileArchivePath, e);<a name="line.484"></a>
-<span class="sourceLineNo">485</span>      return 0L;<a name="line.485"></a>
-<span class="sourceLineNo">486</span>    }<a name="line.486"></a>
-<span class="sourceLineNo">487</span>    LOG.warn("Expected " + fileArchivePath + " to exist but does not, ignoring reference.");<a name="line.487"></a>
-<span class="sourceLineNo">488</span>    return 0L;<a name="line.488"></a>
-<span class="sourceLineNo">489</span>  }<a name="line.489"></a>
-<span class="sourceLineNo">490</span><a name="line.490"></a>
-<span class="sourceLineNo">491</span>  /**<a name="line.491"></a>
-<span class="sourceLineNo">492</span>   * Extracts the names of the store files referenced by this snapshot which satisfy the given<a name="line.492"></a>
-<span class="sourceLineNo">493</span>   * predicate (the predicate returns {@code true}).<a name="line.493"></a>
-<span class="sourceLineNo">494</span>   */<a name="line.494"></a>
-<span class="sourceLineNo">495</span>  Set&lt;StoreFileReference&gt; getStoreFilesFromSnapshot(<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      SnapshotManifest manifest, Predicate&lt;String&gt; filter) {<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    Set&lt;StoreFileReference&gt; references = new HashSet&lt;&gt;();<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    // For each region referenced by the snapshot<a name="line.498"></a>
-<span class="sourceLineNo">499</span>    for (SnapshotRegionManifest rm : manifest.getRegionManifests()) {<a name="line.499"></a>
-<span class="sourceLineNo">500</span>      StoreFileReference regionReference = new StoreFileReference(<a name="line.500"></a>
-<span class="sourceLineNo">501</span>          ProtobufUtil.toRegionInfo(rm.getRegionInfo()).getEncodedName());<a name="line.501"></a>
-<span class="sourceLineNo">502</span><a name="line.502"></a>
-<span class="sourceLineNo">503</span>      // For each column family in this region<a name="line.503"></a>
-<span class="sourceLineNo">504</span>      for (FamilyFiles ff : rm.getFamilyFilesList()) {<a name="line.504"></a>
-<span class="sourceLineNo">505</span>        final String familyName = ff.getFamilyName().toStringUtf8();<a name="line.505"></a>
-<span class="sourceLineNo">506</span>        // And each store file in that family<a name="line.506"></a>
-<span class="sourceLineNo">507</span>        for (StoreFile sf : ff.getStoreFilesList()) {<a name="line.507"></a>
-<span class="sourceLineNo">508</span>          String storeFileName = sf.getName();<a name="line.508"></a>
-<span class="sourceLineNo">509</span>          // A snapshot only "inherits" a files size if it uniquely refers to it (no table<a name="line.509"></a>
-<span class="sourceLineNo">510</span>          // and no other snapshot references it).<a name="line.510"></a>
-<span class="sourceLineNo">511</span>          if (filter.test(storeFileName)) {<a name="line.511"></a>
-<span class="sourceLineNo">512</span>            regionReference.addFamilyStoreFile(familyName, storeFileName);<a name="line.512"></a>
-<span class="sourceLineNo">513</span>          }<a name="line.513"></a>
-<span class="sourceLineNo">514</span>        }<a name="line.514"></a>
-<span class="sourceLineNo">515</span>      }<a name="line.515"></a>
-<span class="sourceLineNo">516</span>      // Only add this Region reference if we retained any files.<a name="line.516"></a>
-<span class="sourceLineNo">517</span>      if (!regionReference.getFamilyToFilesMapping().isEmpty()) {<a name="line.517"></a>
-<span class="sourceLineNo">518</span>        references.add(regionReference);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>      }<a name="line.519"></a>
-<span class="sourceLineNo">520</span>    }<a name="line.520"></a>
-<span class="sourceLineNo">521</span>    return references;<a name="line.521"></a>
-<span class="sourceLineNo">522</span>  }<a name="line.522"></a>
-<span class="sourceLineNo">523</span><a name="line.523"></a>
-<span class="sourceLineNo">524</span>  /**<a name="line.524"></a>
-<span class="sourceLineNo">525</span>   * Writes the snapshot sizes to the provided {@code table}.<a name="line.525"></a>
-<span class="sourceLineNo">526</span>   */<a name="line.526"></a>
-<span class="sourceLineNo">527</span>  void persistSnapshotSizes(<a name="line.527"></a>
-<span class="sourceLineNo">528</span>      Table table, List&lt;SnapshotWithSize&gt; snapshotSizes) throws IOException {<a name="line.528"></a>
-<span class="sourceLineNo">529</span>    // Convert each entry in the map to a Put and write them to the quota table<a name="line.529"></a>
-<span class="sourceLineNo">530</span>    table.put(snapshotSizes<a name="line.530"></a>
-<span class="sourceLineNo">531</span>        .stream()<a name="line.531"></a>
-<span class="sourceLineNo">532</span>        .map(sws -&gt; QuotaTableUtil.createPutForSnapshotSize(<a name="line.532"></a>
-<span class="sourceLineNo">533</span>            tn, sws.getName(), sws.getSize()))<a name="line.533"></a>
-<span class="sourceLineNo">534</span>        .collect(Collectors.toList()));<a name="line.534"></a>
-<span class="sourceLineNo">535</span>  }<a name="line.535"></a>
-<span class="sourceLineNo">536</span><a name="line.536"></a>
-<span class="sourceLineNo">537</span>  /**<a name="line.537"></a>
-<span class="sourceLineNo">538</span>   * A struct encapsulating the name of a snapshot and its "size" on the filesystem. This size is<a name="line.538"></a>
-<span class="sourceLineNo">539</span>   * defined as the amount of filesystem space taken by the files the snapshot refers to which<a name="line.539"></a>
-<span class="sourceLineNo">540</span>   * the originating table no longer refers to.<a name="line.540"></a>
-<span class="sourceLineNo">541</span>   */<a name="line.541"></a>
-<span class="sourceLineNo">542</span>  static class SnapshotWithSize {<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    private final String name;<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    private final long size;<a name="line.544"></a>
-<span class="sourceLineNo">545</span><a name="line.545"></a>
-<span class="sourceLineNo">546</span>    SnapshotWithSize(String name, long size) {<a name="line.546"></a>
-<span class="sourceLineNo">547</span>      this.name = Objects.requireNonNull(name);<a name="line.547"></a>
-<span class="sourceLineNo">548</span>      this.size = size;<a name="line.548"></a>
-<span class="sourceLineNo">549</span>    }<a name="line.549"></a>
-<span class="sourceLineNo">550</span><a name="line.550"></a>
-<span class="sourceLineNo">551</span>    String getName() {<a name="line.551"></a>
-<span class="sourceLineNo">552</span>      return name;<a name="line.552"></a>
-<span class="sourceLineNo">553</span>    }<a name="line.553"></a>
-<span class="sourceLineNo">554</span><a name="line.554"></a>
-<span class="sourceLineNo">555</span>    long getSize() {<a name="line.555"></a>
-<span class="sourceLineNo">556</span>      return size;<a name="line.556"></a>
-<span class="sourceLineNo">557</span>    }<a name="line.557"></a>
-<span class="sourceLineNo">558</span><a name="line.558"></a>
-<span class="sourceLineNo">559</span>    @Override<a name="line.559"></a>
-<span class="sourceLineNo">560</span>    public int hashCode() {<a name="line.560"></a>
-<span class="sourceLineNo">561</span>      return new HashCodeBuilder().append(name).append(size).toHashCode();<a name="line.561"></a>
-<span class="sourceLineNo">562</span>    }<a name="line.562"></a>
-<span class="sourceLineNo">563</span><a name="line.563"></a>
-<span class="sourceLineNo">564</span>    @Override<a name="line.564"></a>
-<span class="sourceLineNo">565</span>    public boolean equals(Object o) {<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      if (this == o) {<a name="line.566"></a>
-<span class="sourceLineNo">567</span>        return true;<a name="line.567"></a>
-<span class="sourceLineNo">568</span>      }<a name="line.568"></a>
-<span class="sourceLineNo">569</span><a name="line.569"></a>
-<span class="sourceLineNo">570</span>      if (!(o instanceof SnapshotWithSize)) {<a name="line.570"></a>
-<span class="sourceLineNo">571</span>        return false;<a name="line.571"></a>
-<span class="sourceLineNo">572</span>      }<a name="line.572"></a>
-<span class="sourceLineNo">573</span><a name="line.573"></a>
-<span class="sourceLineNo">574</span>      SnapshotWithSize other = (SnapshotWithSize) o;<a name="line.574"></a>
-<span class="sourceLineNo">575</span>      return name.equals(other.name) &amp;&amp; size == other.size;<a name="line.575"></a>
-<span class="sourceLineNo">576</span>    }<a name="line.576"></a>
-<span class="sourceLineNo">577</span><a name="line.577"></a>
-<span class="sourceLineNo">578</span>    @Override<a name="line.578"></a>
-<span class="sourceLineNo">579</span>    public String toString() {<a name="line.579"></a>
-<span class="sourceLineNo">580</span>      StringBuilder sb = new StringBuilder(32);<a name="line.580"></a>
-<span class="sourceLineNo">581</span>      return sb.append("SnapshotWithSize:[").append(name).append(" ")<a name="line.581"></a>
-<span class="sourceLineNo">582</span>          .append(StringUtils.byteDesc(size)).append("]").toString();<a name="line.582"></a>
-<span class="sourceLineNo">583</span>    }<a name="line.583"></a>
-<span class="sourceLineNo">584</span>  }<a name="line.584"></a>
-<span class="sourceLineNo">585</span><a name="line.585"></a>
-<span class="sourceLineNo">586</span>  /**<a name="line.586"></a>
-<span class="sourceLineNo">587</span>   * A reference to a collection of files in the archive directory for a single region.<a name="line.587"></a>
-<span class="sourceLineNo">588</span>   */<a name="line.588"></a>
-<span class="sourceLineNo">589</span>  static class StoreFileReference {<a name="line.589"></a>
-<span class="sourceLineNo">590</span>    private final String regionName;<a name="line.590"></a>
-<span class="sourceLineNo">591</span>    private final Multimap&lt;String,String&gt; familyToFiles;<a name="line.591"></a>
-<span class="sourceLineNo">592</span><a name="line.592"></a>
-<span class="sourceLineNo">593</span>    StoreFileReference(String regionName) {<a name="line.593"></a>
-<span class="sourceLineNo">594</span>      this.regionName = Objects.requireNonNull(regionName);<a name="line.594"></a>
-<span class="sourceLineNo">595</span>      familyToFiles = HashMultimap.create();<a name="line.595"></a>
-<span class="sourceLineNo">596</span>    }<a name="line.596"></a>
-<span class="sourceLineNo">597</span><a name="line.597"></a>
-<span class="sourceLineNo">598</span>    String getRegionName() {<a name="line.598"></a>
-<span class="sourceLineNo">599</span>      return regionName;<a name="line.599"></a>
-<span class="sourceLineNo">600</span>    }<a name="line.600"></a>
-<span class="sourceLineNo">601</span><a name="line.601"></a>
-<span class="sourceLineNo">602</span>    Multimap&lt;String,String&gt; getFamilyToFilesMapping() {<a name="line.602"></a>
-<span class="sourceLineNo">603</span>      return familyToFiles;<a name="line.603"></a>
-<span class="sourceLineNo">604</span>    }<a name="line.604"></a>
-<span class="sourceLineNo">605</span><a name="line.605"></a>
-<span class="sourceLineNo">606</span>    void addFamilyStoreFile(String family, String storeFileName) {<a name="line.606"></a>
-<span class="sourceLineNo">607</span>      familyToFiles.put(family, storeFileName);<a name="line.607"></a>
-<span class="sourceLineNo">608</span>    }<a name="line.608"></a>
-<span class="sourceLineNo">609</span><a name="line.609"></a>
-<span class="sourceLineNo">610</span>    @Override<a name="line.610"></a>
-<span class="sourceLineNo">611</span>    public int hashCode() {<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      return new HashCodeBuilder().append(regionName).append(familyToFiles).toHashCode();<a name="line.612"></a>
-<span class="sourceLineNo">613</span>    }<a name="line.613"></a>
-<span class="sourceLineNo">614</span><a name="line.614"></a>
-<span class="sourceLineNo">615</span>    @Override<a name="line.615"></a>
-<span class="sourceLineNo">616</span>    public boolean equals(Object o) {<a name="line.616"></a>
-<span class="sourceLineNo">617</span>      if (this == o) {<a name="line.617"></a>
-<span class="sourceLineNo">618</span>        return true;<a name="line.618"></a>
-<span class="sourceLineNo">619</span>      }<a name="line.619"></a>
-<span class="sourceLineNo">620</span>      if (!(o instanceof StoreFileReference)) {<a name="line.620"></a>
-<span class="sourceLineNo">621</span>        return false;<a name="line.621"></a>
-<span class="sourceLineNo">622</span>      }<a name="line.622"></a>
-<span class="sourceLineNo">623</span>      StoreFileReference other = (StoreFileReference) o;<a name="line.623"></a>
-<span class="sourceLineNo">624</span>      return regionName.equals(other.regionName) &amp;&amp; familyToFiles.equals(other.familyToFiles);<a name="line.624"></a>
-<span class="sourceLineNo">625</span>    }<a name="line.625"></a>
-<span class="sourceLineNo">626</span><a name="line.626"></a>
-<span class="sourceLineNo">627</span>    @Override<a name="line.627"></a>
-<span class="sourceLineNo">628</span>    public String toString() {<a name="line.628"></a>
-<span class="sourceLineNo">629</span>      StringBuilder sb = new StringBuilder();<a name="line.629"></a>
-<span class="sourceLineNo">630</span>      return sb.append("StoreFileReference[region=").append(regionName).append(", files=")<a name="line.630"></a>
-<span class="sourceLineNo">631</span>          .append(familyToFiles).append("]").toString();<a name="line.631"></a>
-<span class="sourceLineNo">632</span>    }<a name="line.632"></a>
-<span class="sourceLineNo">633</span>  }<a name="line.633"></a>
-<span class="sourceLineNo">634</span>}<a name="line.634"></a>
+<span class="sourceLineNo">037</span>import org.apache.commons.lang3.builder.HashCodeBuilder;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.conf.Configuration;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.hadoop.fs.FileStatus;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.apache.hadoop.fs.FileSystem;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.fs.Path;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.TableName;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.client.Connection;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.client.Get;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.client.Put;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.client.Result;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.client.Table;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.util.HFileArchiveUtil;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.util.StringUtils;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.53"></a>
+<span class="sourceLineNo">054</span><a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.slf4j.Logger;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.slf4j.LoggerFactory;<a name="line.56"></a>
+<span class="sourceLineNo">057</span><a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hbase.thirdparty.com.google.common.collect.HashMultimap;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;<a name="line.60"></a>
+<span class="sourceLineNo">061</span><a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile;<a name="line.66"></a>
+<span class="sourceLineNo">067</span><a name="line.67"></a>
+<span class="sourceLineNo">068</span>/**<a name="line.68"></a>
+<span class="sourceLineNo">069</span> * Tracks file archiving and updates the hbase quota table.<a name="line.69"></a>
+<span class="sourceLineNo">070</span> */<a name="line.70"></a>
+<span class="sourceLineNo">071</span>@InterfaceAudience.Private<a name="line.71"></a>
+<span class="sourceLineNo">072</span>public class FileArchiverNotifierImpl implements FileArchiverNotifier {<a name="line.72"></a>
+<span class="sourceLineNo">073</span>  private static final Logger LOG = LoggerFactory.getLogger(FileArchiverNotifierImpl.class);<a name="line.73"></a>
+<span class="sourceLineNo">074</span>  private final Connection conn;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>  private final Configuration conf;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>  private final FileSystem fs;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>  private final TableName tn;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>  private final ReadLock readLock;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>  private final WriteLock writeLock;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>  private volatile long lastFullCompute = Long.MIN_VALUE;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>  private List&lt;String&gt; currentSnapshots = Collections.emptyList();<a name="line.81"></a>
+<span class="sourceLineNo">082</span>  private static final Map&lt;String,Object&gt; NAMESPACE_LOCKS = new HashMap&lt;&gt;();<a name="line.82"></a>
+<span class="sourceLineNo">083</span><a name="line.83"></a>
+<span class="sourceLineNo">084</span>  /**<a name="line.84"></a>
+<span class="sourceLineNo">085</span>   * An Exception thrown when SnapshotSize updates to hbase:quota fail to be written.<a name="line.85"></a>
+<span class="sourceLineNo">086</span>   */<a name="line.86"></a>
+<span class="sourceLineNo">087</span>  @InterfaceAudience.Private<a name="line.87"></a>
+<span class="sourceLineNo">088</span>  public static class QuotaSnapshotSizeSerializationException extends IOException {<a name="line.88"></a>
+<span class="sourceLineNo">089</span>    private static final long serialVersionUID = 1L;<a name="line.89"></a>
+<span class="sourceLineNo">090</span><a name="line.90"></a>
+<span class="sourceLineNo">091</span>    public QuotaSnapshotSizeSerializationException(String msg) {<a name="line.91"></a>
+<span class="sourceLineNo">092</span>      super(msg);<a name="line.92"></a>
+<span class="sourceLineNo">093</span>    }<a name="line.93"></a>
+<span class="sourceLineNo">094</span>  }<a name="line.94"></a>
+<span class="sourceLineNo">095</span><a name="line.95"></a>
+<span class="sourceLineNo">096</span>  public FileArchiverNotifierImpl(<a name="line.96"></a>
+<span class="sourceLineNo">097</span>      Connection conn, Configuration conf, FileSystem fs, TableName tn) {<a name="line.97"></a>
+<span class="sourceLineNo">098</span>    this.conn = conn;<a name="line.98"></a>
+<span class="sourceLineNo">099</span>    this.conf = conf;<a name="line.99"></a>
+<span class="sourceLineNo">100</span>    this.fs = fs;<a name="line.100"></a>
+<span class="sourceLineNo">101</span>    this.tn = tn;<a name="line.101"></a>
+<span class="sourceLineNo">102</span>    ReentrantReadWriteLock lock = new ReentrantReadWriteLock();<a name="line.102"></a>
+<span class="sourceLineNo">103</span>    readLock = lock.readLock();<a name="line.103"></a>
+<span class="sourceLineNo">104</span>    writeLock = lock.writeLock();<a name="line.104"></a>
+<span class="sourceLineNo">105</span>  }<a name="line.105"></a>
+<span class="sourceLineNo">106</span><a name="line.106"></a>
+<span class="sourceLineNo">107</span>  static synchronized Object getLockForNamespace(String namespace) {<a name="line.107"></a>
+<span class="sourceLineNo">108</span>    return NAMESPACE_LOCKS.computeIfAbsent(namespace, (ns) -&gt; new Object());<a name="line.108"></a>
+<span class="sourceLineNo">109</span>  }<a name="line.109"></a>
+<span class="sourceLineNo">110</span><a name="line.110"></a>
+<span class="sourceLineNo">111</span>  /**<a name="line.111"></a>
+<span class="sourceLineNo">112</span>   * Returns a strictly-increasing measure of time extracted by {@link System#nanoTime()}.<a name="line.112"></a>
+<span class="sourceLineNo">113</span>   */<a name="line.113"></a>
+<span class="sourceLineNo">114</span>  long getLastFullCompute() {<a name="line.114"></a>
+<span class="sourceLineNo">115</span>    return lastFullCompute;<a name="line.115"></a>
+<span class="sourceLineNo">116</span>  }<a name="line.116"></a>
+<span class="sourceLineNo">117</span><a name="line.117"></a>
+<span class="sourceLineNo">118</span>  @Override<a name="line.118"></a>
+<span class="sourceLineNo">119</span>  public void addArchivedFiles(Set&lt;Entry&lt;String, Long&gt;&gt; fileSizes) throws IOException {<a name="line.119"></a>
+<span class="sourceLineNo">120</span>    long start = System.nanoTime();<a name="line.120"></a>
+<span class="sourceLineNo">121</span>    readLock.lock();<a name="line.121"></a>
+<span class="sourceLineNo">122</span>    try {<a name="line.122"></a>
+<span class="sourceLineNo">123</span>      // We want to catch the case where we got an archival request, but there was a full<a name="line.123"></a>
+<span class="sourceLineNo">124</span>      // re-computation in progress that was blocking us. Most likely, the full computation is going<a name="line.124"></a>
+<span class="sourceLineNo">125</span>      // to already include the changes we were going to make.<a name="line.125"></a>
+<span class="sourceLineNo">126</span>      //<a name="line.126"></a>
+<span class="sourceLineNo">127</span>      // Same as "start &lt; lastFullCompute" but avoiding numeric overflow per the<a name="line.127"></a>
+<span class="sourceLineNo">128</span>      // System.nanoTime() javadoc<a name="line.128"></a>
+<span class="sourceLineNo">129</span>      if (lastFullCompute != Long.MIN_VALUE &amp;&amp; start - lastFullCompute &lt; 0) {<a name="line.129"></a>
+<span class="sourceLineNo">130</span>        if (LOG.isTraceEnabled()) {<a name="line.130"></a>
+<span class="sourceLineNo">131</span>          LOG.trace("A full computation was performed after this request was received."<a name="line.131"></a>
+<span class="sourceLineNo">132</span>              + " Ignoring requested updates: " + fileSizes);<a name="line.132"></a>
+<span class="sourceLineNo">133</span>        }<a name="line.133"></a>
+<span class="sourceLineNo">134</span>        return;<a name="line.134"></a>
+<span class="sourceLineNo">135</span>      }<a name="line.135"></a>
+<span class="sourceLineNo">136</span><a name="line.136"></a>
+<span class="sourceLineNo">137</span>      if (LOG.isTraceEnabled()) {<a name="line.137"></a>
+<span class="sourceLineNo">138</span>        LOG.trace("currentSnapshots: " + currentSnapshots + " fileSize: "+ fileSizes);<a name="line.138"></a>
+<span class="sourceLineNo">139</span>      }<a name="line.139"></a>
+<span class="sourceLineNo">140</span><a name="line.140"></a>
+<span class="sourceLineNo">141</span>      // Write increment to quota table for the correct snapshot. Only do this if we have snapshots<a name="line.141"></a>
+<span class="sourceLineNo">142</span>      // and some files that were archived.<a name="line.142"></a>
+<span class="sourceLineNo">143</span>      if (!currentSnapshots.isEmpty() &amp;&amp; !fileSizes.isEmpty()) {<a name="line.143"></a>
+<span class="sourceLineNo">144</span>        // We get back the files which no snapshot referenced (the files which will be deleted soon)<a name="line.144"></a>
+<span class="sourceLineNo">145</span>        groupArchivedFiledBySnapshotAndRecordSize(currentSnapshots, fileSizes);<a name="line.145"></a>
+<span class="sourceLineNo">146</span>      }<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    } finally {<a name="line.147"></a>
+<span class="sourceLineNo">148</span>      readLock.unlock();<a name="line.148"></a>
+<span class="sourceLineNo">149</span>    }<a name="line.149"></a>
+<span class="sourceLineNo">150</span>  }<a name="line.150"></a>
+<span class="sourceLineNo">151</span><a name="line.151"></a>
+<span class="sourceLineNo">152</span>  /**<a name="line.152"></a>
+<span class="sourceLineNo">153</span>   * For each file in the map, this updates the first snapshot (lexicographic snapshot name) that<a name="line.153"></a>
+<span class="sourceLineNo">154</span>   * references this file. The result of this computation is serialized to the quota table.<a name="line.154"></a>
+<span class="sourceLineNo">155</span>   *<a name="line.155"></a>
+<span class="sourceLineNo">156</span>   * @param snapshots A collection of HBase snapshots to group the files into<a name="line.156"></a>
+<span class="sourceLineNo">157</span>   * @param fileSizes A map of file names to their sizes<a name="line.157"></a>
+<span class="sourceLineNo">158</span>   */<a name="line.158"></a>
+<span class="sourceLineNo">159</span>  void groupArchivedFiledBySnapshotAndRecordSize(<a name="line.159"></a>
+<span class="sourceLineNo">160</span>      List&lt;String&gt; snapshots, Set&lt;Entry&lt;String, Long&gt;&gt; fileSizes) throws IOException {<a name="line.160"></a>
+<span class="sourceLineNo">161</span>    // Make a copy as we'll modify it.<a name="line.161"></a>
+<span class="sourceLineNo">162</span>    final Map&lt;String,Long&gt; filesToUpdate = new HashMap&lt;&gt;(fileSizes.size());<a name="line.162"></a>
+<span class="sourceLineNo">163</span>    for (Entry&lt;String,Long&gt; entry : fileSizes) {<a name="line.163"></a>
+<span class="sourceLineNo">164</span>      filesToUpdate.put(entry.getKey(), entry.getValue());<a name="line.164"></a>
+<span class="sourceLineNo">165</span>    }<a name="line.165"></a>
+<span class="sourceLineNo">166</span>    // Track the change in size to each snapshot<a name="line.166"></a>
+<span class="sourceLineNo">167</span>    final Map&lt;String,Long&gt; snapshotSizeChanges = new HashMap&lt;&gt;();<a name="line.167"></a>
+<span class="sourceLineNo">168</span>    for (String snapshot : snapshots) {<a name="line.168"></a>
+<span class="sourceLineNo">169</span>      // For each file in `filesToUpdate`, check if `snapshot` refers to it.<a name="line.169"></a>
+<span class="sourceLineNo">170</span>      // If `snapshot` does, remove it from `filesToUpdate` and add it to `snapshotSizeChanges`.<a name="line.170"></a>
+<span class="sourceLineNo">171</span>      bucketFilesToSnapshot(snapshot, filesToUpdate, snapshotSizeChanges);<a name="line.171"></a>
+<span class="sourceLineNo">172</span>      if (filesToUpdate.isEmpty()) {<a name="line.172"></a>
+<span class="sourceLineNo">173</span>        // If we have no more files recently archived, we have nothing more to check<a name="line.173"></a>
+<span class="sourceLineNo">174</span>        break;<a name="line.174"></a>
+<span class="sourceLineNo">175</span>      }<a name="line.175"></a>
+<span class="sourceLineNo">176</span>    }<a name="line.176"></a>
+<span class="sourceLineNo">177</span>    // We have computed changes to the snapshot size, we need to record them.<a name="line.177"></a>
+<span class="sourceLineNo">178</span>    if (!snapshotSizeChanges.isEmpty()) {<a name="line.178"></a>
+<span class="sourceLineNo">179</span>      if (LOG.isTraceEnabled()) {<a name="line.179"></a>
+<span class="sourceLineNo">180</span>        LOG.trace("Writing snapshot size changes for: " + snapshotSizeChanges);<a name="line.180"></a>
+<span class="sourceLineNo">181</span>      }<a name="line.181"></a>
+<span class="sourceLineNo">182</span>      persistSnapshotSizeChanges(snapshotSizeChanges);<a name="line.182"></a>
+<span class="sourceLineNo">183</span>    }<a name="line.183"></a>
+<span class="sourceLineNo">184</span>  }<a name="line.184"></a>
+<span class="sourceLineNo">185</span><a name="line.185"></a>
+<span class="sourceLineNo">186</span>  /**<a name="line.186"></a>
+<span class="sourceLineNo">187</span>   * For the given snapshot, find all files which this {@code snapshotName} references. After a file<a name="line.187"></a>
+<span class="sourceLineNo">188</span>   * is found to be referenced by the snapshot, it is removed from {@code filesToUpdate} and<a name="line.188"></a>
+<span class="sourceLineNo">189</span>   * {@code snapshotSizeChanges} is updated in concert.<a name="line.189"></a>
+<span class="sourceLineNo">190</span>   *<a name="line.190"></a>
+<span class="sourceLineNo">191</span>   * @param snapshotName The snapshot to check<a name="line.191"></a>
+<span class="sourceLineNo">192</span>   * @param filesToUpdate A mapping of archived files to their size<a name="line.192"></a>
+<span class="sourceLineNo">193</span>   * @param snapshotSizeChanges A mapping of snapshots and their change in size<a name="line.193"></a>
+<span class="sourceLineNo">194</span>   */<a name="line.194"></a>
+<span class="sourceLineNo">195</span>  void bucketFilesToSnapshot(<a name="line.195"></a>
+<span class="sourceLineNo">196</span>      String snapshotName, Map&lt;String,Long&gt; filesToUpdate, Map&lt;String,Long&gt; snapshotSizeChanges)<a name="line.196"></a>
+<span class="sourceLineNo">197</span>          throws IOException {<a name="line.197"></a>
+<span class="sourceLineNo">198</span>    // A quick check to avoid doing work if the caller unnecessarily invoked this method.<a name="line.198"></a>
+<span class="sourceLineNo">199</span>    if (filesToUpdate.isEmpty()) {<a name="line.199"></a>
+<span class="sourceLineNo">200</span>      return;<a name="line.200"></a>
+<span class="sourceLineNo">201</span>    }<a name="line.201"></a>
+<span class="sourceLineNo">202</span><a name="line.202"></a>
+<span class="sourceLineNo">203</span>    Path snapshotDir = SnapshotDescriptionUtils.getCompletedSnapshotDir(<a name="line.203"></a>
+<span class="sourceLineNo">204</span>        snapshotName, FSUtils.getRootDir(conf));<a name="line.204"></a>
+<span class="sourceLineNo">205</span>    SnapshotDescription sd = SnapshotDescriptionUtils.readSnapshotInfo(fs, snapshotDir);<a name="line.205"></a>
+<span class="sourceLineNo">206</span>    SnapshotManifest manifest = SnapshotManifest.open(conf, fs, snapshotDir, sd);<a name="line.206"></a>
+<span class="sourceLineNo">207</span>    // For each region referenced by the snapshot<a name="line.207"></a>
+<span class="sourceLineNo">208</span>    for (SnapshotRegionManifest rm : manifest.getRegionManifests()) {<a name="line.208"></a>
+<span class="sourceLineNo">209</span>      // For each column family in this region<a name="line.209"></a>
+<span class="sourceLineNo">210</span>      for (FamilyFiles ff : rm.getFamilyFilesList()) {<a name="line.210"></a>
+<span class="sourceLineNo">211</span>        // And each store file in that family<a name="line.211"></a>
+<span class="sourceLineNo">212</span>        for (StoreFile sf : ff.getStoreFilesList()) {<a name="line.212"></a>
+<span class="sourceLineNo">213</span>          Long valueOrNull = filesToUpdate.remove(sf.getName());<a name="line.213"></a>
+<span class="sourceLineNo">214</span>          if (valueOrNull != null) {<a name="line.214"></a>
+<span class="sourceLineNo">215</span>            // This storefile was recently archived, we should update this snapshot with its size<a name="line.215"></a>
+<span class="sourceLineNo">216</span>            snapshotSizeChanges.merge(snapshotName, valueOrNull, Long::sum);<a name="line.216"></a>
+<span class="sourceLineNo">217</span>          }<a name="line.217"></a>
+<span class="sourceLineNo">218</span>          // Short-circuit, if we have no more files that were archived, we don't need to iterate<a name="line.218"></a>
+<span class="sourceLineNo">219</span>          // over the rest of the snapshot.<a name="line.219"></a>
+<span class="sourceLineNo">220</span>          if (filesToUpdate.isEmpty()) {<a name="line.220"></a>
+<span class="sourceLineNo">221</span>            return;<a name="line.221"></a>
+<span class="sourceLineNo">222</span>          }<a name="line.222"></a>
+<span class="sourceLineNo">223</span>        }<a name="line.223"></a>
+<span class="sourceLineNo">224</span>      }<a name="line.224"></a>
+<span class="sourceLineNo">225</span>    }<a name="line.225"></a>
+<span class="sourceLineNo">226</span>  }<a name="line.226"></a>
+<span class="sourceLineNo">227</span><a name="line.227"></a>
+<span class="sourceLineNo">228</span>  /**<a name="line.228"></a>
+<span class="sourceLineNo">229</span>   * Reads the current size for each snapshot to update, generates a new update based on that value,<a name="line.229"></a>
+<span class="sourceLineNo">230</span>   * and then writes the new update.<a name="line.230"></a>
+<span class="sourceLineNo">231</span>   *<a name="line.231"></a>
+<span class="sourceLineNo">232</span>   * @param snapshotSizeChanges A map of snapshot name to size change<a name="line.232"></a>
+<span class="sourceLineNo">233</span>   */<a name="line.233"></a>
+<span class="sourceLineNo">234</span>  void persistSnapshotSizeChanges(Map&lt;String,Long&gt; snapshotSizeChanges) throws IOException {<a name="line.234"></a>
+<span class="sourceLineNo">235</span>    try (Table quotaTable = conn.getTable(QuotaTableUtil.QUOTA_TABLE_NAME)) {<a name="line.235"></a>
+<span class="sourceLineNo">236</span>      // Create a list (with a more typical ordering implied)<a name="line.236"></a>
+<span class="sourceLineNo">237</span>      final List&lt;Entry&lt;String,Long&gt;&gt; snapshotSizeEntries = new ArrayList&lt;&gt;(<a name="line.237"></a>
+<span class="sourceLineNo">238</span>          snapshotSizeChanges.entrySet());<a name="line.238"></a>
+<span class="sourceLineNo">239</span>      // Create the Gets for each snapshot we need to update<a name="line.239"></a>
+<span class="sourceLineNo">240</span>      final List&lt;Get&gt; snapshotSizeGets = snapshotSizeEntries.stream()<a name="line.240"></a>
+<span class="sourceLineNo">241</span>          .map((e) -&gt; QuotaTableUtil.makeGetForSnapshotSize(tn, e.getKey()))<a name="line.241"></a>
+<span class="sourceLineNo">242</span>          .collect(Collectors.toList());<a name="line.242"></a>
+<span class="sourceLineNo">243</span>      final Iterator&lt;Entry&lt;String,Long&gt;&gt; iterator = snapshotSizeEntries.iterator();<a name="line.243"></a>
+<span class="sourceLineNo">244</span>      // A List to store each Put we'll create from the Get's we retrieve<a name="line.244"></a>
+<span class="sourceLineNo">245</span>      final List&lt;Put&gt; updates = new ArrayList&lt;&gt;(snapshotSizeEntries.size());<a name="line.245"></a>
+<span class="sourceLineNo">246</span><a name="line.246"></a>
+<span class="sourceLineNo">247</span>      // TODO Push this down to the RegionServer with a coprocessor:<a name="line.247"></a>
+<span class="sourceLineNo">248</span>      //<a name="line.248"></a>
+<span class="sourceLineNo">249</span>      // We would really like to piggy-back on the row-lock already being grabbed<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      // to handle the update of the row in the quota table. However, because the value<a name="line.250"></a>
+<span class="sourceLineNo">251</span>      // is a serialized protobuf, the standard Increment API doesn't work for us. With a CP, we<a name="line.251"></a>
+<span class="sourceLineNo">252</span>      // can just send the size deltas to the RS and atomically update the serialized PB object<a name="line.252"></a>
+<span class="sourceLineNo">253</span>      // while relying on the row-lock for synchronization.<a name="line.253"></a>
+<span class="sourceLineNo">254</span>      //<a name="line.254"></a>
+<span class="sourceLineNo">255</span>      // Synchronizing on the namespace string is a "minor smell" but passable as this is<a name="line.255"></a>
+<span class="sourceLineNo">256</span>      // only invoked via a single caller (the active Master). Using the namespace name lets us<a name="line.256"></a>
+<span class="sourceLineNo">257</span>      // have some parallelism without worry of on caller seeing stale data from the quota table.<a name="line.257"></a>
+<span class="sourceLineNo">258</span>      synchronized (getLockForNamespace(tn.getNamespaceAsString())) {<a name="line.258"></a>
+<span class="sourceLineNo">259</span>        final Result[] existingSnapshotSizes = quotaTable.get(snapshotSizeGets);<a name="line.259"></a>
+<span class="sourceLineNo">260</span>        long totalSizeChange = 0;<a name="line.260"></a>
+<span class="sourceLineNo">261</span>        // Read the current size values (if they exist) to generate the new value<a name="line.261"></a>
+<span class="sourceLineNo">262</span>        for (Result result : existingSnapshotSizes) {<a name="line.262"></a>
+<span class="sourceLineNo">263</span>          Entry&lt;String,Long&gt; entry = iterator.next();<a name="line.263"></a>
+<span class="sourceLineNo">264</span>          String snapshot = entry.getKey();<a name="line.264"></a>
+<span class="sourceLineNo">265</span>          Long size = entry.getValue();<a name="line.265"></a>
+<span class="sourceLineNo">266</span>          // Track the total size change for the namespace this table belongs in<a name="line.266"></a>
+<span class="sourceLineNo">267</span>          totalSizeChange += size;<a name="line.267"></a>
+<span class="sourceLineNo">268</span>          // Get the size of the previous value (or zero)<a name="line.268"></a>
+<span class="sourceLineNo">269</span>          long previousSize = getSnapshotSizeFromResult(result);<a name="line.269"></a>
+<span class="sourceLineNo">270</span>          // Create an update. A file was archived from the table, so the table's size goes<a name="line.270"></a>
+<span class="sourceLineNo">271</span>          // down, but the snapshot's size goes up.<a name="line.271"></a>
+<span class="sourceLineNo">272</span>          updates.add(QuotaTableUtil.createPutForSnapshotSize(tn, snapshot, previousSize + size));<a name="line.272"></a>
+<span class="sourceLineNo">273</span>        }<a name="line.273"></a>
+<span class="sourceLineNo">274</span><a name="line.274"></a>
+<span class="sourceLineNo">275</span>        // Create an update for the summation of all snapshots in the namespace<a name="line.275"></a>
+<span class="sourceLineNo">276</span>        if (totalSizeChange != 0) {<a name="line.276"></a>
+<span class="sourceLineNo">277</span>          long previousSize = getPreviousNamespaceSnapshotSize(<a name="line.277"></a>
+<span class="sourceLineNo">278</span>              quotaTable, tn.getNamespaceAsString());<a name="line.278"></a>
+<span class="sourceLineNo">279</span>          updates.add(QuotaTableUtil.createPutForNamespaceSnapshotSize(<a name="line.279"></a>
+<span class="sourceLineNo">280</span>              tn.getNamespaceAsString(), previousSize + totalSizeChange));<a name="line.280"></a>
+<span class="sourceLineNo">281</span>        }<a name="line.281"></a>
+<span class="sourceLineNo">282</span><a name="line.282"></a>
+<span class="sourceLineNo">283</span>        // Send all of the quota table updates in one batch.<a name="line.283"></a>
+<span class="sourceLineNo">284</span>        List&lt;Object&gt; failures = new ArrayList&lt;&gt;();<a name="line.284"></a>
+<span class="sourceLineNo">285</span>        final Object[] results = new Object[updates.size()];<a name="line.285"></a>
+<span class="sourceLineNo">286</span>        quotaTable.batch(updates, results);<a name="line.286"></a>
+<span class="sourceLineNo">287</span>        for (Object result : results) {<a name="line.287"></a>
+<span class="sourceLineNo">288</span>          // A null result is an error condition (all RPC attempts failed)<a name="line.288"></a>
+<span class="sourceLineNo">289</span>          if (!(result instanceof Result)) {<a name="line.289"></a>
+<span class="sourceLineNo">290</span>            failures.add(result);<a name="line.290"></a>
+<span class="sourceLineNo">291</span>          }<a name="line.291"></a>
+<span class="sourceLineNo">292</span>        }<a name="line.292"></a>
+<span class="sourceLineNo">293</span>        // Propagate a failure if any updates failed<a name="line.293"></a>
+<span class="sourceLineNo">294</span>        if (!failures.isEmpty()) {<a name="line.294"></a>
+<span class="sourceLineNo">295</span>          throw new QuotaSnapshotSizeSerializationException(<a name="line.295"></a>
+<span class="sourceLineNo">296</span>

<TRUNCATED>
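
The truncated hunk above adds FileArchiverNotifierImpl, whose bucketFilesToSnapshot pass walks each snapshot manifest and charges every recently archived store file to the first snapshot that still references it. What follows is a minimal, self-contained Java sketch of that first-match bucketing, not the HBase implementation itself: the Map<String, List<String>> standing in for SnapshotManifest, and the class and method names, are hypothetical, chosen only so the example runs without HBase on the classpath.

import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

// A sketch of the first-match bucketing in bucketFilesToSnapshot (assumed
// simplification: manifests are plain in-memory lists, not protobuf manifests).
public class SnapshotBucketingSketch {
  static Map<String, Long> bucketBySnapshot(
      List<String> snapshots,                    // checked in order, e.g. sorted lexicographically
      Map<String, List<String>> manifestFiles,   // snapshot name -> store files it references
      Map<String, Long> archivedFileSizes) {     // archived store file name -> size in bytes
    // Copy the input, since files are removed from it once a snapshot claims them.
    Map<String, Long> remaining = new HashMap<>(archivedFileSizes);
    Map<String, Long> sizeChanges = new LinkedHashMap<>();
    for (String snapshot : snapshots) {
      for (String storeFile : manifestFiles.getOrDefault(snapshot, List.of())) {
        Long size = remaining.remove(storeFile);
        if (size != null) {
          // Same merge-based accumulation as the diff: the snapshot "inherits" the file's size.
          sizeChanges.merge(snapshot, size, Long::sum);
        }
        if (remaining.isEmpty()) {
          return sizeChanges; // short-circuit once every archived file has been claimed
        }
      }
    }
    return sizeChanges;
  }

  public static void main(String[] args) {
    Map<String, List<String>> manifests = Map.of(
        "snap-a", List.of("hfile-1", "hfile-2"),
        "snap-b", List.of("hfile-2", "hfile-3"));
    Map<String, Long> archived = Map.of("hfile-2", 100L, "hfile-3", 40L);
    // hfile-2 is charged only to snap-a (the first match); hfile-3 falls through to snap-b.
    System.out.println(bucketBySnapshot(List.of("snap-a", "snap-b"), manifests, archived));
  }
}

As in the real method, the early return stops the manifest walk as soon as every archived file has been claimed, and sorting the snapshot list (as computeAndStoreSnapshotSizes does) is what makes the first-match rule deterministic. Relatedly, the guard in addArchivedFiles compares timestamps as "start - lastFullCompute < 0" rather than "start < lastFullCompute", which is the overflow-safe comparison the System.nanoTime() javadoc prescribes.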

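The persistSnapshotSizeChanges hunk is a read-modify-write: the stored size is a serialized SpaceQuotaSnapshot protobuf rather than a plain counter, so the Increment API cannot be used; instead the code issues Gets for the current values, adds each delta, writes the Puts, and rolls the total up into a namespace row, all under a per-namespace lock. Below is a toy model of why that lock matters, with a ConcurrentHashMap of longs standing in for hbase:quota; the row-key layout and all names here are illustrative assumptions, not the real schema.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Toy model of the locked read-modify-write in persistSnapshotSizeChanges.
// Assumption: plain long values stand in for the serialized protobuf sizes.
public class SnapshotSizeStoreSketch {
  private final Map<String, Long> quotaTable = new ConcurrentHashMap<>(); // stands in for hbase:quota
  private final Map<String, Object> namespaceLocks = new ConcurrentHashMap<>();

  void applyDeltas(String namespace, Map<String, Long> snapshotDeltas) {
    Object lock = namespaceLocks.computeIfAbsent(namespace, ns -> new Object());
    synchronized (lock) { // serializes writers for the same namespace
      long totalChange = 0;
      for (Map.Entry<String, Long> e : snapshotDeltas.entrySet()) {
        String rowKey = namespace + ":snapshot:" + e.getKey();
        long previous = quotaTable.getOrDefault(rowKey, 0L);  // the "Get"
        quotaTable.put(rowKey, previous + e.getValue());      // the "Put"
        totalChange += e.getValue();
      }
      if (totalChange != 0) { // roll the change up into the namespace summary row
        String nsKey = namespace + ":total";
        quotaTable.put(nsKey, quotaTable.getOrDefault(nsKey, 0L) + totalChange);
      }
    }
  }
}

Without the synchronized block, two writers for the same namespace could interleave their read and write steps and one delta would silently be lost. The diff's own TODO points at the cleaner fix: push the update into a coprocessor so the RegionServer can apply the delta under the row lock it already holds.
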
[07/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html
index 09e41f8..e103c0d 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html
@@ -29,179 +29,182 @@
 <span class="sourceLineNo">021</span>import java.util.Map;<a name="line.21"></a>
 <span class="sourceLineNo">022</span>import java.util.concurrent.ConcurrentSkipListMap;<a name="line.22"></a>
 <span class="sourceLineNo">023</span>import java.util.concurrent.atomic.AtomicInteger;<a name="line.23"></a>
-<span class="sourceLineNo">024</span>import org.apache.commons.logging.Log;<a name="line.24"></a>
-<span class="sourceLineNo">025</span>import org.apache.commons.logging.LogFactory;<a name="line.25"></a>
-<span class="sourceLineNo">026</span>import org.apache.hadoop.conf.Configuration;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import org.apache.hadoop.hbase.Cell;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.regionserver.Region;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.regionserver.Store;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>/**<a name="line.35"></a>
-<span class="sourceLineNo">036</span> * StoreHotnessProtector is designed to help limit the concurrency of puts with dense columns, it<a name="line.36"></a>
-<span class="sourceLineNo">037</span> * does best-effort to avoid exhausting all RS's handlers. When a lot of clients write requests with<a name="line.37"></a>
-<span class="sourceLineNo">038</span> * dense (hundreds) columns to a Store at the same time, it will lead to blocking of RS because CSLM<a name="line.38"></a>
-<span class="sourceLineNo">039</span> * degrades when concurrency goes up. It's not a kind of throttling. Throttling is user-oriented,<a name="line.39"></a>
-<span class="sourceLineNo">040</span> * while StoreHotnessProtector is system-oriented, RS-self-protected mechanism.<a name="line.40"></a>
-<span class="sourceLineNo">041</span> * &lt;p&gt;<a name="line.41"></a>
-<span class="sourceLineNo">042</span> * There are three key parameters:<a name="line.42"></a>
-<span class="sourceLineNo">043</span> * &lt;p&gt;<a name="line.43"></a>
-<span class="sourceLineNo">044</span> * 1. parallelPutToStoreThreadLimitCheckMinColumnCount: If the amount of columns exceed this<a name="line.44"></a>
-<span class="sourceLineNo">045</span> * threshold, the HotProtector will work, 100 by default<a name="line.45"></a>
+<span class="sourceLineNo">024</span>import org.apache.hadoop.conf.Configuration;<a name="line.24"></a>
+<span class="sourceLineNo">025</span>import org.apache.hadoop.hbase.Cell;<a name="line.25"></a>
+<span class="sourceLineNo">026</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.26"></a>
+<span class="sourceLineNo">027</span>import org.apache.hadoop.hbase.regionserver.Region;<a name="line.27"></a>
+<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.regionserver.Store;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.31"></a>
+<span class="sourceLineNo">032</span><a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.slf4j.Logger;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.slf4j.LoggerFactory;<a name="line.34"></a>
+<span class="sourceLineNo">035</span><a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.36"></a>
+<span class="sourceLineNo">037</span><a name="line.37"></a>
+<span class="sourceLineNo">038</span>/**<a name="line.38"></a>
+<span class="sourceLineNo">039</span> * StoreHotnessProtector is designed to help limit the concurrency of puts with dense columns, it<a name="line.39"></a>
+<span class="sourceLineNo">040</span> * does best-effort to avoid exhausting all RS's handlers. When a lot of clients write requests with<a name="line.40"></a>
+<span class="sourceLineNo">041</span> * dense (hundreds) columns to a Store at the same time, it will lead to blocking of RS because CSLM<a name="line.41"></a>
+<span class="sourceLineNo">042</span> * degrades when concurrency goes up. It's not a kind of throttling. Throttling is user-oriented,<a name="line.42"></a>
+<span class="sourceLineNo">043</span> * while StoreHotnessProtector is system-oriented, RS-self-protected mechanism.<a name="line.43"></a>
+<span class="sourceLineNo">044</span> * &lt;p&gt;<a name="line.44"></a>
+<span class="sourceLineNo">045</span> * There are three key parameters:<a name="line.45"></a>
 <span class="sourceLineNo">046</span> * &lt;p&gt;<a name="line.46"></a>
-<span class="sourceLineNo">047</span> * 2. parallelPutToStoreThreadLimit: The amount of concurrency allowed to write puts to a Store at<a name="line.47"></a>
-<span class="sourceLineNo">048</span> * the same time.<a name="line.48"></a>
+<span class="sourceLineNo">047</span> * 1. parallelPutToStoreThreadLimitCheckMinColumnCount: If the amount of columns exceed this<a name="line.47"></a>
+<span class="sourceLineNo">048</span> * threshold, the HotProtector will work, 100 by default<a name="line.48"></a>
 <span class="sourceLineNo">049</span> * &lt;p&gt;<a name="line.49"></a>
-<span class="sourceLineNo">050</span> * 3. parallelPreparePutToStoreThreadLimit: The amount of concurrency allowed to<a name="line.50"></a>
-<span class="sourceLineNo">051</span> * prepare writing puts to a Store at the same time.<a name="line.51"></a>
+<span class="sourceLineNo">050</span> * 2. parallelPutToStoreThreadLimit: The amount of concurrency allowed to write puts to a Store at<a name="line.50"></a>
+<span class="sourceLineNo">051</span> * the same time.<a name="line.51"></a>
 <span class="sourceLineNo">052</span> * &lt;p&gt;<a name="line.52"></a>
-<span class="sourceLineNo">053</span> * Notice that our writing pipeline includes three key process: MVCC acquire, writing MemStore, and<a name="line.53"></a>
-<span class="sourceLineNo">054</span> * WAL. Only limit the concurrency of writing puts to Store(parallelPutToStoreThreadLimit) is not<a name="line.54"></a>
-<span class="sourceLineNo">055</span> * enough since the actual concurrency of puts may still exceed the limit when MVCC contention or<a name="line.55"></a>
-<span class="sourceLineNo">056</span> * slow WAL sync happens. This is why parallelPreparePutToStoreThreadLimit is needed.<a name="line.56"></a>
-<span class="sourceLineNo">057</span> * &lt;p&gt;<a name="line.57"></a>
-<span class="sourceLineNo">058</span> * This protector is enabled by default and could be turned off by setting<a name="line.58"></a>
-<span class="sourceLineNo">059</span> * hbase.region.store.parallel.put.limit to 0, supporting online configuration change.<a name="line.59"></a>
-<span class="sourceLineNo">060</span> */<a name="line.60"></a>
-<span class="sourceLineNo">061</span>@InterfaceAudience.Private<a name="line.61"></a>
-<span class="sourceLineNo">062</span>public class StoreHotnessProtector {<a name="line.62"></a>
-<span class="sourceLineNo">063</span>  private static final Log LOG = LogFactory.getLog(StoreHotnessProtector.class);<a name="line.63"></a>
-<span class="sourceLineNo">064</span>  private volatile int parallelPutToStoreThreadLimit;<a name="line.64"></a>
-<span class="sourceLineNo">065</span><a name="line.65"></a>
-<span class="sourceLineNo">066</span>  private volatile int parallelPreparePutToStoreThreadLimit;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>  public final static String PARALLEL_PUT_STORE_THREADS_LIMIT =<a name="line.67"></a>
-<span class="sourceLineNo">068</span>      "hbase.region.store.parallel.put.limit";<a name="line.68"></a>
-<span class="sourceLineNo">069</span>  public final static String PARALLEL_PREPARE_PUT_STORE_MULTIPLIER =<a name="line.69"></a>
-<span class="sourceLineNo">070</span>      "hbase.region.store.parallel.prepare.put.multiplier";<a name="line.70"></a>
-<span class="sourceLineNo">071</span>  private final static int DEFAULT_PARALLEL_PUT_STORE_THREADS_LIMIT = 10;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>  private volatile int parallelPutToStoreThreadLimitCheckMinColumnCount;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>  public final static String PARALLEL_PUT_STORE_THREADS_LIMIT_MIN_COLUMN_COUNT =<a name="line.73"></a>
-<span class="sourceLineNo">074</span>      "hbase.region.store.parallel.put.limit.min.column.count";<a name="line.74"></a>
-<span class="sourceLineNo">075</span>  private final static int DEFAULT_PARALLEL_PUT_STORE_THREADS_LIMIT_MIN_COLUMN_NUM = 100;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>  private final static int DEFAULT_PARALLEL_PREPARE_PUT_STORE_MULTIPLIER = 2;<a name="line.76"></a>
-<span class="sourceLineNo">077</span><a name="line.77"></a>
-<span class="sourceLineNo">078</span>  private final Map&lt;byte[], AtomicInteger&gt; preparePutToStoreMap =<a name="line.78"></a>
-<span class="sourceLineNo">079</span>      new ConcurrentSkipListMap&lt;&gt;(Bytes.BYTES_RAWCOMPARATOR);<a name="line.79"></a>
-<span class="sourceLineNo">080</span>  private final Region region;<a name="line.80"></a>
-<span class="sourceLineNo">081</span><a name="line.81"></a>
-<span class="sourceLineNo">082</span>  public StoreHotnessProtector(Region region, Configuration conf) {<a name="line.82"></a>
-<span class="sourceLineNo">083</span>    init(conf);<a name="line.83"></a>
-<span class="sourceLineNo">084</span>    this.region = region;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>  }<a name="line.85"></a>
-<span class="sourceLineNo">086</span><a name="line.86"></a>
-<span class="sourceLineNo">087</span>  public void init(Configuration conf) {<a name="line.87"></a>
-<span class="sourceLineNo">088</span>    this.parallelPutToStoreThreadLimit =<a name="line.88"></a>
-<span class="sourceLineNo">089</span>        conf.getInt(PARALLEL_PUT_STORE_THREADS_LIMIT, DEFAULT_PARALLEL_PUT_STORE_THREADS_LIMIT);<a name="line.89"></a>
-<span class="sourceLineNo">090</span>    this.parallelPreparePutToStoreThreadLimit = conf.getInt(PARALLEL_PREPARE_PUT_STORE_MULTIPLIER,<a name="line.90"></a>
-<span class="sourceLineNo">091</span>        DEFAULT_PARALLEL_PREPARE_PUT_STORE_MULTIPLIER) * parallelPutToStoreThreadLimit;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>    this.parallelPutToStoreThreadLimitCheckMinColumnCount =<a name="line.92"></a>
-<span class="sourceLineNo">093</span>        conf.getInt(PARALLEL_PUT_STORE_THREADS_LIMIT_MIN_COLUMN_COUNT,<a name="line.93"></a>
-<span class="sourceLineNo">094</span>            DEFAULT_PARALLEL_PUT_STORE_THREADS_LIMIT_MIN_COLUMN_NUM);<a name="line.94"></a>
-<span class="sourceLineNo">095</span><a name="line.95"></a>
-<span class="sourceLineNo">096</span>  }<a name="line.96"></a>
-<span class="sourceLineNo">097</span><a name="line.97"></a>
-<span class="sourceLineNo">098</span>  public void update(Configuration conf) {<a name="line.98"></a>
-<span class="sourceLineNo">099</span>    init(conf);<a name="line.99"></a>
-<span class="sourceLineNo">100</span>    preparePutToStoreMap.clear();<a name="line.100"></a>
-<span class="sourceLineNo">101</span>    LOG.debug("update config: " + toString());<a name="line.101"></a>
-<span class="sourceLineNo">102</span>  }<a name="line.102"></a>
-<span class="sourceLineNo">103</span><a name="line.103"></a>
-<span class="sourceLineNo">104</span>  public void start(Map&lt;byte[], List&lt;Cell&gt;&gt; familyMaps) throws RegionTooBusyException {<a name="line.104"></a>
-<span class="sourceLineNo">105</span>    if (!isEnable()) {<a name="line.105"></a>
-<span class="sourceLineNo">106</span>      return;<a name="line.106"></a>
-<span class="sourceLineNo">107</span>    }<a name="line.107"></a>
-<span class="sourceLineNo">108</span><a name="line.108"></a>
-<span class="sourceLineNo">109</span>    String tooBusyStore = null;<a name="line.109"></a>
-<span class="sourceLineNo">110</span><a name="line.110"></a>
-<span class="sourceLineNo">111</span>    for (Map.Entry&lt;byte[], List&lt;Cell&gt;&gt; e : familyMaps.entrySet()) {<a name="line.111"></a>
-<span class="sourceLineNo">112</span>      Store store = this.region.getStore(e.getKey());<a name="line.112"></a>
-<span class="sourceLineNo">113</span>      if (store == null || e.getValue() == null) {<a name="line.113"></a>
-<span class="sourceLineNo">114</span>        continue;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>      }<a name="line.115"></a>
-<span class="sourceLineNo">116</span><a name="line.116"></a>
-<span class="sourceLineNo">117</span>      if (e.getValue().size() &gt; this.parallelPutToStoreThreadLimitCheckMinColumnCount) {<a name="line.117"></a>
-<span class="sourceLineNo">118</span><a name="line.118"></a>
-<span class="sourceLineNo">119</span>        //we need to try to add #preparePutCount at first because preparePutToStoreMap will be<a name="line.119"></a>
-<span class="sourceLineNo">120</span>        //cleared when changing the configuration.<a name="line.120"></a>
-<span class="sourceLineNo">121</span>        preparePutToStoreMap.putIfAbsent(e.getKey(), new AtomicInteger());<a name="line.121"></a>
-<span class="sourceLineNo">122</span>        AtomicInteger preparePutCounter = preparePutToStoreMap.get(e.getKey());<a name="line.122"></a>
-<span class="sourceLineNo">123</span>        if (preparePutCounter == null) {<a name="line.123"></a>
-<span class="sourceLineNo">124</span>          preparePutCounter = new AtomicInteger();<a name="line.124"></a>
-<span class="sourceLineNo">125</span>          preparePutToStoreMap.putIfAbsent(e.getKey(), preparePutCounter);<a name="line.125"></a>
-<span class="sourceLineNo">126</span>        }<a name="line.126"></a>
-<span class="sourceLineNo">127</span>        int preparePutCount = preparePutCounter.incrementAndGet();<a name="line.127"></a>
-<span class="sourceLineNo">128</span>        if (store.getCurrentParallelPutCount() &gt; this.parallelPutToStoreThreadLimit<a name="line.128"></a>
-<span class="sourceLineNo">129</span>            || preparePutCount &gt; this.parallelPreparePutToStoreThreadLimit) {<a name="line.129"></a>
-<span class="sourceLineNo">130</span>          tooBusyStore = (tooBusyStore == null ?<a name="line.130"></a>
-<span class="sourceLineNo">131</span>              store.getColumnFamilyName() :<a name="line.131"></a>
-<span class="sourceLineNo">132</span>              tooBusyStore + "," + store.getColumnFamilyName());<a name="line.132"></a>
-<span class="sourceLineNo">133</span>        }<a name="line.133"></a>
-<span class="sourceLineNo">134</span><a name="line.134"></a>
-<span class="sourceLineNo">135</span>        if (LOG.isTraceEnabled()) {<a name="line.135"></a>
-<span class="sourceLineNo">136</span>          LOG.trace(store.getColumnFamilyName() + ": preparePutCount=" + preparePutCount<a name="line.136"></a>
-<span class="sourceLineNo">137</span>              + "; currentParallelPutCount=" + store.getCurrentParallelPutCount());<a name="line.137"></a>
-<span class="sourceLineNo">138</span>        }<a name="line.138"></a>
-<span class="sourceLineNo">139</span>      }<a name="line.139"></a>
-<span class="sourceLineNo">140</span>    }<a name="line.140"></a>
-<span class="sourceLineNo">141</span><a name="line.141"></a>
-<span class="sourceLineNo">142</span>    if (tooBusyStore != null) {<a name="line.142"></a>
-<span class="sourceLineNo">143</span>      String msg =<a name="line.143"></a>
-<span class="sourceLineNo">144</span>          "StoreTooBusy," + this.region.getRegionInfo().getRegionNameAsString() + ":" + tooBusyStore<a name="line.144"></a>
-<span class="sourceLineNo">145</span>              + " Above parallelPutToStoreThreadLimit(" + this.parallelPutToStoreThreadLimit + ")";<a name="line.145"></a>
-<span class="sourceLineNo">146</span>      if (LOG.isTraceEnabled()) {<a name="line.146"></a>
-<span class="sourceLineNo">147</span>        LOG.trace(msg);<a name="line.147"></a>
-<span class="sourceLineNo">148</span>      }<a name="line.148"></a>
-<span class="sourceLineNo">149</span>      throw new RegionTooBusyException(msg);<a name="line.149"></a>
-<span class="sourceLineNo">150</span>    }<a name="line.150"></a>
-<span class="sourceLineNo">151</span>  }<a name="line.151"></a>
-<span class="sourceLineNo">152</span><a name="line.152"></a>
-<span class="sourceLineNo">153</span>  public void finish(Map&lt;byte[], List&lt;Cell&gt;&gt; familyMaps) {<a name="line.153"></a>
-<span class="sourceLineNo">154</span>    if (!isEnable()) {<a name="line.154"></a>
-<span class="sourceLineNo">155</span>      return;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>    }<a name="line.156"></a>
-<span class="sourceLineNo">157</span><a name="line.157"></a>
-<span class="sourceLineNo">158</span>    for (Map.Entry&lt;byte[], List&lt;Cell&gt;&gt; e : familyMaps.entrySet()) {<a name="line.158"></a>
-<span class="sourceLineNo">159</span>      Store store = this.region.getStore(e.getKey());<a name="line.159"></a>
-<span class="sourceLineNo">160</span>      if (store == null || e.getValue() == null) {<a name="line.160"></a>
-<span class="sourceLineNo">161</span>        continue;<a name="line.161"></a>
-<span class="sourceLineNo">162</span>      }<a name="line.162"></a>
-<span class="sourceLineNo">163</span>      if (e.getValue().size() &gt; this.parallelPutToStoreThreadLimitCheckMinColumnCount) {<a name="line.163"></a>
-<span class="sourceLineNo">164</span>        AtomicInteger counter = preparePutToStoreMap.get(e.getKey());<a name="line.164"></a>
-<span class="sourceLineNo">165</span>        // preparePutToStoreMap will be cleared when changing the configuration, so it may turn<a name="line.165"></a>
-<span class="sourceLineNo">166</span>        // into a negative value. It will be not accuracy in a short time, it's a trade-off for<a name="line.166"></a>
-<span class="sourceLineNo">167</span>        // performance.<a name="line.167"></a>
-<span class="sourceLineNo">168</span>        if (counter != null &amp;&amp; counter.decrementAndGet() &lt; 0) {<a name="line.168"></a>
-<span class="sourceLineNo">169</span>          counter.incrementAndGet();<a name="line.169"></a>
-<span class="sourceLineNo">170</span>        }<a name="line.170"></a>
-<span class="sourceLineNo">171</span>      }<a name="line.171"></a>
-<span class="sourceLineNo">172</span>    }<a name="line.172"></a>
-<span class="sourceLineNo">173</span>  }<a name="line.173"></a>
-<span class="sourceLineNo">174</span><a name="line.174"></a>
-<span class="sourceLineNo">175</span>  public String toString() {<a name="line.175"></a>
-<span class="sourceLineNo">176</span>    return "StoreHotnessProtector, parallelPutToStoreThreadLimit="<a name="line.176"></a>
-<span class="sourceLineNo">177</span>        + this.parallelPutToStoreThreadLimit + " ; minColumnNum="<a name="line.177"></a>
-<span class="sourceLineNo">178</span>        + this.parallelPutToStoreThreadLimitCheckMinColumnCount + " ; preparePutThreadLimit="<a name="line.178"></a>
-<span class="sourceLineNo">179</span>        + this.parallelPreparePutToStoreThreadLimit + " ; hotProtect now " + (this.isEnable() ?<a name="line.179"></a>
-<span class="sourceLineNo">180</span>        "enable" :<a name="line.180"></a>
-<span class="sourceLineNo">181</span>        "disable");<a name="line.181"></a>
-<span class="sourceLineNo">182</span>  }<a name="line.182"></a>
-<span class="sourceLineNo">183</span><a name="line.183"></a>
-<span class="sourceLineNo">184</span>  public boolean isEnable() {<a name="line.184"></a>
-<span class="sourceLineNo">185</span>    // feature is enabled when parallelPutToStoreThreadLimit &gt; 0<a name="line.185"></a>
-<span class="sourceLineNo">186</span>    return this.parallelPutToStoreThreadLimit &gt; 0;<a name="line.186"></a>
-<span class="sourceLineNo">187</span>  }<a name="line.187"></a>
-<span class="sourceLineNo">188</span><a name="line.188"></a>
-<span class="sourceLineNo">189</span>  @VisibleForTesting<a name="line.189"></a>
-<span class="sourceLineNo">190</span>  Map&lt;byte[], AtomicInteger&gt; getPreparePutToStoreMap() {<a name="line.190"></a>
-<span class="sourceLineNo">191</span>    return preparePutToStoreMap;<a name="line.191"></a>
-<span class="sourceLineNo">192</span>  }<a name="line.192"></a>
-<span class="sourceLineNo">193</span><a name="line.193"></a>
-<span class="sourceLineNo">194</span>  public static final long FIXED_SIZE =<a name="line.194"></a>
-<span class="sourceLineNo">195</span>      ClassSize.align(ClassSize.OBJECT + 2 * ClassSize.REFERENCE + 3 * Bytes.SIZEOF_INT);<a name="line.195"></a>
-<span class="sourceLineNo">196</span>}<a name="line.196"></a>
+<span class="sourceLineNo">053</span> * 3. parallelPreparePutToStoreThreadLimit: The amount of concurrency allowed to<a name="line.53"></a>
+<span class="sourceLineNo">054</span> * prepare writing puts to a Store at the same time.<a name="line.54"></a>
+<span class="sourceLineNo">055</span> * &lt;p&gt;<a name="line.55"></a>
+<span class="sourceLineNo">056</span> * Notice that our writing pipeline includes three key process: MVCC acquire, writing MemStore, and<a name="line.56"></a>
+<span class="sourceLineNo">057</span> * WAL. Only limit the concurrency of writing puts to Store(parallelPutToStoreThreadLimit) is not<a name="line.57"></a>
+<span class="sourceLineNo">058</span> * enough since the actual concurrency of puts may still exceed the limit when MVCC contention or<a name="line.58"></a>
+<span class="sourceLineNo">059</span> * slow WAL sync happens. This is why parallelPreparePutToStoreThreadLimit is needed.<a name="line.59"></a>
+<span class="sourceLineNo">060</span> * &lt;p&gt;<a name="line.60"></a>
+<span class="sourceLineNo">061</span> * This protector is enabled by default and could be turned off by setting<a name="line.61"></a>
+<span class="sourceLineNo">062</span> * hbase.region.store.parallel.put.limit to 0, supporting online configuration change.<a name="line.62"></a>
+<span class="sourceLineNo">063</span> */<a name="line.63"></a>
+<span class="sourceLineNo">064</span>@InterfaceAudience.Private<a name="line.64"></a>
+<span class="sourceLineNo">065</span>public class StoreHotnessProtector {<a name="line.65"></a>
+<span class="sourceLineNo">066</span>  private static final Logger LOG = LoggerFactory.getLogger(StoreHotnessProtector.class);<a name="line.66"></a>
+<span class="sourceLineNo">067</span>  private volatile int parallelPutToStoreThreadLimit;<a name="line.67"></a>
+<span class="sourceLineNo">068</span><a name="line.68"></a>
+<span class="sourceLineNo">069</span>  private volatile int parallelPreparePutToStoreThreadLimit;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>  public final static String PARALLEL_PUT_STORE_THREADS_LIMIT =<a name="line.70"></a>
+<span class="sourceLineNo">071</span>      "hbase.region.store.parallel.put.limit";<a name="line.71"></a>
+<span class="sourceLineNo">072</span>  public final static String PARALLEL_PREPARE_PUT_STORE_MULTIPLIER =<a name="line.72"></a>
+<span class="sourceLineNo">073</span>      "hbase.region.store.parallel.prepare.put.multiplier";<a name="line.73"></a>
+<span class="sourceLineNo">074</span>  private final static int DEFAULT_PARALLEL_PUT_STORE_THREADS_LIMIT = 10;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>  private volatile int parallelPutToStoreThreadLimitCheckMinColumnCount;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>  public final static String PARALLEL_PUT_STORE_THREADS_LIMIT_MIN_COLUMN_COUNT =<a name="line.76"></a>
+<span class="sourceLineNo">077</span>      "hbase.region.store.parallel.put.limit.min.column.count";<a name="line.77"></a>
+<span class="sourceLineNo">078</span>  private final static int DEFAULT_PARALLEL_PUT_STORE_THREADS_LIMIT_MIN_COLUMN_NUM = 100;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>  private final static int DEFAULT_PARALLEL_PREPARE_PUT_STORE_MULTIPLIER = 2;<a name="line.79"></a>
+<span class="sourceLineNo">080</span><a name="line.80"></a>
+<span class="sourceLineNo">081</span>  private final Map&lt;byte[], AtomicInteger&gt; preparePutToStoreMap =<a name="line.81"></a>
+<span class="sourceLineNo">082</span>      new ConcurrentSkipListMap&lt;&gt;(Bytes.BYTES_RAWCOMPARATOR);<a name="line.82"></a>
+<span class="sourceLineNo">083</span>  private final Region region;<a name="line.83"></a>
+<span class="sourceLineNo">084</span><a name="line.84"></a>
+<span class="sourceLineNo">085</span>  public StoreHotnessProtector(Region region, Configuration conf) {<a name="line.85"></a>
+<span class="sourceLineNo">086</span>    init(conf);<a name="line.86"></a>
+<span class="sourceLineNo">087</span>    this.region = region;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>  }<a name="line.88"></a>
+<span class="sourceLineNo">089</span><a name="line.89"></a>
+<span class="sourceLineNo">090</span>  public void init(Configuration conf) {<a name="line.90"></a>
+<span class="sourceLineNo">091</span>    this.parallelPutToStoreThreadLimit =<a name="line.91"></a>
+<span class="sourceLineNo">092</span>        conf.getInt(PARALLEL_PUT_STORE_THREADS_LIMIT, DEFAULT_PARALLEL_PUT_STORE_THREADS_LIMIT);<a name="line.92"></a>
+<span class="sourceLineNo">093</span>    this.parallelPreparePutToStoreThreadLimit = conf.getInt(PARALLEL_PREPARE_PUT_STORE_MULTIPLIER,<a name="line.93"></a>
+<span class="sourceLineNo">094</span>        DEFAULT_PARALLEL_PREPARE_PUT_STORE_MULTIPLIER) * parallelPutToStoreThreadLimit;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>    this.parallelPutToStoreThreadLimitCheckMinColumnCount =<a name="line.95"></a>
+<span class="sourceLineNo">096</span>        conf.getInt(PARALLEL_PUT_STORE_THREADS_LIMIT_MIN_COLUMN_COUNT,<a name="line.96"></a>
+<span class="sourceLineNo">097</span>            DEFAULT_PARALLEL_PUT_STORE_THREADS_LIMIT_MIN_COLUMN_NUM);<a name="line.97"></a>
+<span class="sourceLineNo">098</span><a name="line.98"></a>
+<span class="sourceLineNo">099</span>  }<a name="line.99"></a>
+<span class="sourceLineNo">100</span><a name="line.100"></a>
+<span class="sourceLineNo">101</span>  public void update(Configuration conf) {<a name="line.101"></a>
+<span class="sourceLineNo">102</span>    init(conf);<a name="line.102"></a>
+<span class="sourceLineNo">103</span>    preparePutToStoreMap.clear();<a name="line.103"></a>
+<span class="sourceLineNo">104</span>    LOG.debug("update config: " + toString());<a name="line.104"></a>
+<span class="sourceLineNo">105</span>  }<a name="line.105"></a>
+<span class="sourceLineNo">106</span><a name="line.106"></a>
+<span class="sourceLineNo">107</span>  public void start(Map&lt;byte[], List&lt;Cell&gt;&gt; familyMaps) throws RegionTooBusyException {<a name="line.107"></a>
+<span class="sourceLineNo">108</span>    if (!isEnable()) {<a name="line.108"></a>
+<span class="sourceLineNo">109</span>      return;<a name="line.109"></a>
+<span class="sourceLineNo">110</span>    }<a name="line.110"></a>
+<span class="sourceLineNo">111</span><a name="line.111"></a>
+<span class="sourceLineNo">112</span>    String tooBusyStore = null;<a name="line.112"></a>
+<span class="sourceLineNo">113</span><a name="line.113"></a>
+<span class="sourceLineNo">114</span>    for (Map.Entry&lt;byte[], List&lt;Cell&gt;&gt; e : familyMaps.entrySet()) {<a name="line.114"></a>
+<span class="sourceLineNo">115</span>      Store store = this.region.getStore(e.getKey());<a name="line.115"></a>
+<span class="sourceLineNo">116</span>      if (store == null || e.getValue() == null) {<a name="line.116"></a>
+<span class="sourceLineNo">117</span>        continue;<a name="line.117"></a>
+<span class="sourceLineNo">118</span>      }<a name="line.118"></a>
+<span class="sourceLineNo">119</span><a name="line.119"></a>
+<span class="sourceLineNo">120</span>      if (e.getValue().size() &gt; this.parallelPutToStoreThreadLimitCheckMinColumnCount) {<a name="line.120"></a>
+<span class="sourceLineNo">121</span><a name="line.121"></a>
+<span class="sourceLineNo">122</span>        //we need to try to add #preparePutCount at first because preparePutToStoreMap will be<a name="line.122"></a>
+<span class="sourceLineNo">123</span>        //cleared when changing the configuration.<a name="line.123"></a>
+<span class="sourceLineNo">124</span>        preparePutToStoreMap.putIfAbsent(e.getKey(), new AtomicInteger());<a name="line.124"></a>
+<span class="sourceLineNo">125</span>        AtomicInteger preparePutCounter = preparePutToStoreMap.get(e.getKey());<a name="line.125"></a>
+<span class="sourceLineNo">126</span>        if (preparePutCounter == null) {<a name="line.126"></a>
+<span class="sourceLineNo">127</span>          preparePutCounter = new AtomicInteger();<a name="line.127"></a>
+<span class="sourceLineNo">128</span>          preparePutToStoreMap.putIfAbsent(e.getKey(), preparePutCounter);<a name="line.128"></a>
+<span class="sourceLineNo">129</span>        }<a name="line.129"></a>
+<span class="sourceLineNo">130</span>        int preparePutCount = preparePutCounter.incrementAndGet();<a name="line.130"></a>
+<span class="sourceLineNo">131</span>        if (store.getCurrentParallelPutCount() &gt; this.parallelPutToStoreThreadLimit<a name="line.131"></a>
+<span class="sourceLineNo">132</span>            || preparePutCount &gt; this.parallelPreparePutToStoreThreadLimit) {<a name="line.132"></a>
+<span class="sourceLineNo">133</span>          tooBusyStore = (tooBusyStore == null ?<a name="line.133"></a>
+<span class="sourceLineNo">134</span>              store.getColumnFamilyName() :<a name="line.134"></a>
+<span class="sourceLineNo">135</span>              tooBusyStore + "," + store.getColumnFamilyName());<a name="line.135"></a>
+<span class="sourceLineNo">136</span>        }<a name="line.136"></a>
+<span class="sourceLineNo">137</span><a name="line.137"></a>
+<span class="sourceLineNo">138</span>        if (LOG.isTraceEnabled()) {<a name="line.138"></a>
+<span class="sourceLineNo">139</span>          LOG.trace(store.getColumnFamilyName() + ": preparePutCount=" + preparePutCount<a name="line.139"></a>
+<span class="sourceLineNo">140</span>              + "; currentParallelPutCount=" + store.getCurrentParallelPutCount());<a name="line.140"></a>
+<span class="sourceLineNo">141</span>        }<a name="line.141"></a>
+<span class="sourceLineNo">142</span>      }<a name="line.142"></a>
+<span class="sourceLineNo">143</span>    }<a name="line.143"></a>
+<span class="sourceLineNo">144</span><a name="line.144"></a>
+<span class="sourceLineNo">145</span>    if (tooBusyStore != null) {<a name="line.145"></a>
+<span class="sourceLineNo">146</span>      String msg =<a name="line.146"></a>
+<span class="sourceLineNo">147</span>          "StoreTooBusy," + this.region.getRegionInfo().getRegionNameAsString() + ":" + tooBusyStore<a name="line.147"></a>
+<span class="sourceLineNo">148</span>              + " Above parallelPutToStoreThreadLimit(" + this.parallelPutToStoreThreadLimit + ")";<a name="line.148"></a>
+<span class="sourceLineNo">149</span>      if (LOG.isTraceEnabled()) {<a name="line.149"></a>
+<span class="sourceLineNo">150</span>        LOG.trace(msg);<a name="line.150"></a>
+<span class="sourceLineNo">151</span>      }<a name="line.151"></a>
+<span class="sourceLineNo">152</span>      throw new RegionTooBusyException(msg);<a name="line.152"></a>
+<span class="sourceLineNo">153</span>    }<a name="line.153"></a>
+<span class="sourceLineNo">154</span>  }<a name="line.154"></a>
+<span class="sourceLineNo">155</span><a name="line.155"></a>
+<span class="sourceLineNo">156</span>  public void finish(Map&lt;byte[], List&lt;Cell&gt;&gt; familyMaps) {<a name="line.156"></a>
+<span class="sourceLineNo">157</span>    if (!isEnable()) {<a name="line.157"></a>
+<span class="sourceLineNo">158</span>      return;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>    }<a name="line.159"></a>
+<span class="sourceLineNo">160</span><a name="line.160"></a>
+<span class="sourceLineNo">161</span>    for (Map.Entry&lt;byte[], List&lt;Cell&gt;&gt; e : familyMaps.entrySet()) {<a name="line.161"></a>
+<span class="sourceLineNo">162</span>      Store store = this.region.getStore(e.getKey());<a name="line.162"></a>
+<span class="sourceLineNo">163</span>      if (store == null || e.getValue() == null) {<a name="line.163"></a>
+<span class="sourceLineNo">164</span>        continue;<a name="line.164"></a>
+<span class="sourceLineNo">165</span>      }<a name="line.165"></a>
+<span class="sourceLineNo">166</span>      if (e.getValue().size() &gt; this.parallelPutToStoreThreadLimitCheckMinColumnCount) {<a name="line.166"></a>
+<span class="sourceLineNo">167</span>        AtomicInteger counter = preparePutToStoreMap.get(e.getKey());<a name="line.167"></a>
+<span class="sourceLineNo">168</span>        // preparePutToStoreMap will be cleared when changing the configuration, so it may turn<a name="line.168"></a>
+<span class="sourceLineNo">169</span>        // into a negative value. It will be not accuracy in a short time, it's a trade-off for<a name="line.169"></a>
+<span class="sourceLineNo">170</span>        // performance.<a name="line.170"></a>
+<span class="sourceLineNo">171</span>        if (counter != null &amp;&amp; counter.decrementAndGet() &lt; 0) {<a name="line.171"></a>
+<span class="sourceLineNo">172</span>          counter.incrementAndGet();<a name="line.172"></a>
+<span class="sourceLineNo">173</span>        }<a name="line.173"></a>
+<span class="sourceLineNo">174</span>      }<a name="line.174"></a>
+<span class="sourceLineNo">175</span>    }<a name="line.175"></a>
+<span class="sourceLineNo">176</span>  }<a name="line.176"></a>
+<span class="sourceLineNo">177</span><a name="line.177"></a>
+<span class="sourceLineNo">178</span>  public String toString() {<a name="line.178"></a>
+<span class="sourceLineNo">179</span>    return "StoreHotnessProtector, parallelPutToStoreThreadLimit="<a name="line.179"></a>
+<span class="sourceLineNo">180</span>        + this.parallelPutToStoreThreadLimit + " ; minColumnNum="<a name="line.180"></a>
+<span class="sourceLineNo">181</span>        + this.parallelPutToStoreThreadLimitCheckMinColumnCount + " ; preparePutThreadLimit="<a name="line.181"></a>
+<span class="sourceLineNo">182</span>        + this.parallelPreparePutToStoreThreadLimit + " ; hotProtect now " + (this.isEnable() ?<a name="line.182"></a>
+<span class="sourceLineNo">183</span>        "enable" :<a name="line.183"></a>
+<span class="sourceLineNo">184</span>        "disable");<a name="line.184"></a>
+<span class="sourceLineNo">185</span>  }<a name="line.185"></a>
+<span class="sourceLineNo">186</span><a name="line.186"></a>
+<span class="sourceLineNo">187</span>  public boolean isEnable() {<a name="line.187"></a>
+<span class="sourceLineNo">188</span>    // feature is enabled when parallelPutToStoreThreadLimit &gt; 0<a name="line.188"></a>
+<span class="sourceLineNo">189</span>    return this.parallelPutToStoreThreadLimit &gt; 0;<a name="line.189"></a>
+<span class="sourceLineNo">190</span>  }<a name="line.190"></a>
+<span class="sourceLineNo">191</span><a name="line.191"></a>
+<span class="sourceLineNo">192</span>  @VisibleForTesting<a name="line.192"></a>
+<span class="sourceLineNo">193</span>  Map&lt;byte[], AtomicInteger&gt; getPreparePutToStoreMap() {<a name="line.193"></a>
+<span class="sourceLineNo">194</span>    return preparePutToStoreMap;<a name="line.194"></a>
+<span class="sourceLineNo">195</span>  }<a name="line.195"></a>
+<span class="sourceLineNo">196</span><a name="line.196"></a>
+<span class="sourceLineNo">197</span>  public static final long FIXED_SIZE =<a name="line.197"></a>
+<span class="sourceLineNo">198</span>      ClassSize.align(ClassSize.OBJECT + 2 * ClassSize.REFERENCE + 3 * Bytes.SIZEOF_INT);<a name="line.198"></a>
+<span class="sourceLineNo">199</span>}<a name="line.199"></a>
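
The class comment in this hunk spells out the protector's three knobs and notes that it is on by default and honors online configuration change. A minimal sketch of tuning those keys through a plain Configuration; the values are illustrative, not recommendations:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.HBaseConfiguration;

  public class HotnessProtectorTuning {
    public static void main(String[] args) {
      Configuration conf = HBaseConfiguration.create();
      // Per-Store put concurrency limit (default 10); 0 disables the protector.
      conf.setInt("hbase.region.store.parallel.put.limit", 20);
      // Rows must touch more than this many columns in one Store to be
      // checked at all (default 100).
      conf.setInt("hbase.region.store.parallel.put.limit.min.column.count", 100);
      // Prepare-stage limit = multiplier * put limit (default multiplier 2),
      // covering pile-ups caused by MVCC contention or slow WAL sync.
      conf.setInt("hbase.region.store.parallel.prepare.put.multiplier", 2);
      System.out.println(conf.getInt("hbase.region.store.parallel.put.limit", 10));
    }
  }

When the limits are exceeded, start() above throws RegionTooBusyException naming the busy store(s); update(Configuration) re-reads these keys, which is what makes a change effective without a region server restart.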
 
 
 
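The same hunk also swaps commons-logging (Log/LogFactory) for slf4j. A minimal sketch of the replacement pattern; the class name is a hypothetical placeholder:

  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;

  public class LoggingMigrationDemo {
    // Before: private static final Log LOG = LogFactory.getLog(LoggingMigrationDemo.class);
    private static final Logger LOG = LoggerFactory.getLogger(LoggingMigrationDemo.class);

    public static void main(String[] args) {
      // slf4j's {} placeholders avoid concatenation cost when the level is
      // off; the migrated code above keeps its original concatenation style.
      LOG.debug("update config: {}", "example");
    }
  }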

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/FSWALEntry.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/FSWALEntry.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/FSWALEntry.html
index a9224f0..e3e1a34 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/FSWALEntry.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/wal/FSWALEntry.html
@@ -33,20 +33,20 @@
 <span class="sourceLineNo">025</span>import java.util.Set;<a name="line.25"></a>
 <span class="sourceLineNo">026</span>import java.util.TreeSet;<a name="line.26"></a>
 <span class="sourceLineNo">027</span><a name="line.27"></a>
-<span class="sourceLineNo">028</span>import org.apache.commons.collections.CollectionUtils;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.Cell;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.wal.WAL.Entry;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.wal.WALKeyImpl;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.39"></a>
-<span class="sourceLineNo">040</span><a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.41"></a>
+<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.Cell;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.wal.WAL.Entry;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.wal.WALKeyImpl;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.38"></a>
+<span class="sourceLineNo">039</span><a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.41"></a>
 <span class="sourceLineNo">042</span><a name="line.42"></a>
 <span class="sourceLineNo">043</span>/**<a name="line.43"></a>
 <span class="sourceLineNo">044</span> * A WAL Entry for {@link AbstractFSWAL} implementation.  Immutable.<a name="line.44"></a>
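
This hunk, like the ZKReplicationQueueStorage and Bytes hunks that follow, replaces unshaded commons-collections 3.x with the collections4 build relocated under the hbase-thirdparty prefix. A minimal sketch of the import change; the demo class is a hypothetical placeholder:

  // Before: import org.apache.commons.collections.CollectionUtils;
  import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;

  import java.util.Collections;
  import java.util.List;

  public class ShadedCollectionsDemo {
    public static void main(String[] args) {
      List<String> cells = Collections.emptyList();
      // isEmpty(null) is safe in both versions; only the package changes.
      System.out.println(CollectionUtils.isEmpty(cells)); // prints true
    }
  }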

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.html b/devapidocs/src-html/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.html
index c90e6fa..338eb9c 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.html
@@ -37,30 +37,30 @@
 <span class="sourceLineNo">029</span>import java.util.SortedSet;<a name="line.29"></a>
 <span class="sourceLineNo">030</span>import java.util.TreeSet;<a name="line.30"></a>
 <span class="sourceLineNo">031</span>import java.util.stream.Collectors;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.commons.collections.CollectionUtils;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.conf.Configuration;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.fs.Path;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.HConstants;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.ServerName;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.exceptions.DeserializationException;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil.ZKUtilOp;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.zookeeper.KeeperException;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.zookeeper.KeeperException.BadVersionException;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.zookeeper.KeeperException.NoNodeException;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.zookeeper.KeeperException.NodeExistsException;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.zookeeper.KeeperException.NotEmptyException;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.zookeeper.data.Stat;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.slf4j.Logger;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.slf4j.LoggerFactory;<a name="line.53"></a>
-<span class="sourceLineNo">054</span><a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.55"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.conf.Configuration;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.hadoop.fs.Path;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.HConstants;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.ServerName;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.exceptions.DeserializationException;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil.ZKUtilOp;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.zookeeper.ZNodePaths;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.zookeeper.KeeperException;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.zookeeper.KeeperException.BadVersionException;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.zookeeper.KeeperException.NoNodeException;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.zookeeper.KeeperException.NodeExistsException;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.zookeeper.KeeperException.NotEmptyException;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.zookeeper.data.Stat;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.slf4j.Logger;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.slf4j.LoggerFactory;<a name="line.52"></a>
+<span class="sourceLineNo">053</span><a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.55"></a>
 <span class="sourceLineNo">056</span><a name="line.56"></a>
 <span class="sourceLineNo">057</span>/**<a name="line.57"></a>
 <span class="sourceLineNo">058</span> * ZK based replication queue storage.<a name="line.58"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html
index e606e82..a242321 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html
@@ -46,18 +46,18 @@
 <span class="sourceLineNo">038</span>import java.util.Iterator;<a name="line.38"></a>
 <span class="sourceLineNo">039</span>import java.util.List;<a name="line.39"></a>
 <span class="sourceLineNo">040</span><a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.commons.collections.CollectionUtils;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.Cell;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.io.RawComparator;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.io.WritableComparator;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.io.WritableUtils;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.slf4j.Logger;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.slf4j.LoggerFactory;<a name="line.50"></a>
-<span class="sourceLineNo">051</span><a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.52"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.Cell;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.io.RawComparator;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.io.WritableComparator;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.io.WritableUtils;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.slf4j.Logger;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.slf4j.LoggerFactory;<a name="line.49"></a>
+<span class="sourceLineNo">050</span><a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.52"></a>
 <span class="sourceLineNo">053</span><a name="line.53"></a>
 <span class="sourceLineNo">054</span>import com.google.protobuf.ByteString;<a name="line.54"></a>
 <span class="sourceLineNo">055</span><a name="line.55"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.Comparer.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.Comparer.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.Comparer.html
index e606e82..a242321 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.Comparer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.Comparer.html
@@ -46,18 +46,18 @@
 <span class="sourceLineNo">038</span>import java.util.Iterator;<a name="line.38"></a>
 <span class="sourceLineNo">039</span>import java.util.List;<a name="line.39"></a>
 <span class="sourceLineNo">040</span><a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.commons.collections.CollectionUtils;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.Cell;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.io.RawComparator;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.io.WritableComparator;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.io.WritableUtils;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.slf4j.Logger;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.slf4j.LoggerFactory;<a name="line.50"></a>
-<span class="sourceLineNo">051</span><a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.52"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.Cell;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.io.RawComparator;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.io.WritableComparator;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.io.WritableUtils;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.slf4j.Logger;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.slf4j.LoggerFactory;<a name="line.49"></a>
+<span class="sourceLineNo">050</span><a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.52"></a>
 <span class="sourceLineNo">053</span><a name="line.53"></a>
 <span class="sourceLineNo">054</span>import com.google.protobuf.ByteString;<a name="line.54"></a>
 <span class="sourceLineNo">055</span><a name="line.55"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.PureJavaComparer.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.PureJavaComparer.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.PureJavaComparer.html
index e606e82..a242321 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.PureJavaComparer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.PureJavaComparer.html
@@ -46,18 +46,18 @@
 <span class="sourceLineNo">038</span>import java.util.Iterator;<a name="line.38"></a>
 <span class="sourceLineNo">039</span>import java.util.List;<a name="line.39"></a>
 <span class="sourceLineNo">040</span><a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.commons.collections.CollectionUtils;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.Cell;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.io.RawComparator;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.io.WritableComparator;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.io.WritableUtils;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.slf4j.Logger;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.slf4j.LoggerFactory;<a name="line.50"></a>
-<span class="sourceLineNo">051</span><a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.52"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.Cell;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.io.RawComparator;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.io.WritableComparator;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.io.WritableUtils;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.slf4j.Logger;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.slf4j.LoggerFactory;<a name="line.49"></a>
+<span class="sourceLineNo">050</span><a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.52"></a>
 <span class="sourceLineNo">053</span><a name="line.53"></a>
 <span class="sourceLineNo">054</span>import com.google.protobuf.ByteString;<a name="line.54"></a>
 <span class="sourceLineNo">055</span><a name="line.55"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html
index e606e82..a242321 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html
@@ -46,18 +46,18 @@
 <span class="sourceLineNo">038</span>import java.util.Iterator;<a name="line.38"></a>
 <span class="sourceLineNo">039</span>import java.util.List;<a name="line.39"></a>
 <span class="sourceLineNo">040</span><a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.commons.collections.CollectionUtils;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.Cell;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.io.RawComparator;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.io.WritableComparator;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.io.WritableUtils;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.slf4j.Logger;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.slf4j.LoggerFactory;<a name="line.50"></a>
-<span class="sourceLineNo">051</span><a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.52"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.Cell;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.io.RawComparator;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.io.WritableComparator;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.io.WritableUtils;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.slf4j.Logger;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.slf4j.LoggerFactory;<a name="line.49"></a>
+<span class="sourceLineNo">050</span><a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.52"></a>
 <span class="sourceLineNo">053</span><a name="line.53"></a>
 <span class="sourceLineNo">054</span>import com.google.protobuf.ByteString;<a name="line.54"></a>
 <span class="sourceLineNo">055</span><a name="line.55"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.html
index e606e82..a242321 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.html
@@ -46,18 +46,18 @@
 <span class="sourceLineNo">038</span>import java.util.Iterator;<a name="line.38"></a>
 <span class="sourceLineNo">039</span>import java.util.List;<a name="line.39"></a>
 <span class="sourceLineNo">040</span><a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.commons.collections.CollectionUtils;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.Cell;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.io.RawComparator;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.io.WritableComparator;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.io.WritableUtils;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.slf4j.Logger;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.slf4j.LoggerFactory;<a name="line.50"></a>
-<span class="sourceLineNo">051</span><a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.52"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.Cell;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.io.RawComparator;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.io.WritableComparator;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.io.WritableUtils;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.slf4j.Logger;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.slf4j.LoggerFactory;<a name="line.49"></a>
+<span class="sourceLineNo">050</span><a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.52"></a>
 <span class="sourceLineNo">053</span><a name="line.53"></a>
 <span class="sourceLineNo">054</span>import com.google.protobuf.ByteString;<a name="line.54"></a>
 <span class="sourceLineNo">055</span><a name="line.55"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html
index e606e82..a242321 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html
@@ -46,18 +46,18 @@
 <span class="sourceLineNo">038</span>import java.util.Iterator;<a name="line.38"></a>
 <span class="sourceLineNo">039</span>import java.util.List;<a name="line.39"></a>
 <span class="sourceLineNo">040</span><a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.commons.collections.CollectionUtils;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.Cell;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.io.RawComparator;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.io.WritableComparator;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.io.WritableUtils;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.slf4j.Logger;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.slf4j.LoggerFactory;<a name="line.50"></a>
-<span class="sourceLineNo">051</span><a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.52"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.Cell;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.io.RawComparator;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.io.WritableComparator;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.io.WritableUtils;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.slf4j.Logger;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.slf4j.LoggerFactory;<a name="line.49"></a>
+<span class="sourceLineNo">050</span><a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.52"></a>
 <span class="sourceLineNo">053</span><a name="line.53"></a>
 <span class="sourceLineNo">054</span>import com.google.protobuf.ByteString;<a name="line.54"></a>
 <span class="sourceLineNo">055</span><a name="line.55"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html b/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html
index e606e82..a242321 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html
@@ -46,18 +46,18 @@
 <span class="sourceLineNo">038</span>import java.util.Iterator;<a name="line.38"></a>
 <span class="sourceLineNo">039</span>import java.util.List;<a name="line.39"></a>
 <span class="sourceLineNo">040</span><a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.commons.collections.CollectionUtils;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.Cell;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.io.RawComparator;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.io.WritableComparator;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.io.WritableUtils;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.slf4j.Logger;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.slf4j.LoggerFactory;<a name="line.50"></a>
-<span class="sourceLineNo">051</span><a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.52"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.Cell;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.io.RawComparator;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.io.WritableComparator;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.io.WritableUtils;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.slf4j.Logger;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.slf4j.LoggerFactory;<a name="line.49"></a>
+<span class="sourceLineNo">050</span><a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.52"></a>
 <span class="sourceLineNo">053</span><a name="line.53"></a>
 <span class="sourceLineNo">054</span>import com.google.protobuf.ByteString;<a name="line.54"></a>
 <span class="sourceLineNo">055</span><a name="line.55"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.BoundedLogWriterCreationOutputSink.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.BoundedLogWriterCreationOutputSink.html b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.BoundedLogWriterCreationOutputSink.html
index 9f6a1bd..99f53c4 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.BoundedLogWriterCreationOutputSink.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.BoundedLogWriterCreationOutputSink.html
@@ -54,55 +54,55 @@
 <span class="sourceLineNo">046</span>import java.util.concurrent.atomic.AtomicReference;<a name="line.46"></a>
 <span class="sourceLineNo">047</span>import java.util.regex.Matcher;<a name="line.47"></a>
 <span class="sourceLineNo">048</span>import java.util.regex.Pattern;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.commons.collections.CollectionUtils;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.commons.collections.MapUtils;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.commons.lang3.ArrayUtils;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.conf.Configuration;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.FileAlreadyExistsException;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.FileStatus;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.FileSystem;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.fs.Path;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.fs.PathFilter;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.Cell;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.HConstants;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.TableName;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.client.Put;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.log.HBaseMarkers;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.master.SplitLogManager;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.regionserver.LastSequenceId;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.wal.WAL.Entry;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.wal.WAL.Reader;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.wal.WALProvider.Writer;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.slf4j.Logger;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.slf4j.LoggerFactory;<a name="line.92"></a>
-<span class="sourceLineNo">093</span><a name="line.93"></a>
-<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.97"></a>
+<span class="sourceLineNo">049</span>import org.apache.commons.lang3.ArrayUtils;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.conf.Configuration;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.fs.FileAlreadyExistsException;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.fs.FileStatus;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.FileSystem;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.Path;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.PathFilter;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.Cell;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.HConstants;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.TableName;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.client.Put;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.log.HBaseMarkers;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.master.SplitLogManager;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.regionserver.LastSequenceId;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.wal.WAL.Entry;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.wal.WAL.Reader;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.wal.WALProvider.Writer;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.slf4j.Logger;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.slf4j.LoggerFactory;<a name="line.90"></a>
+<span class="sourceLineNo">091</span><a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.MapUtils;<a name="line.97"></a>
 <span class="sourceLineNo">098</span><a name="line.98"></a>
 <span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry;<a name="line.99"></a>
 <span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType;<a name="line.100"></a>


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.WriteState.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.WriteState.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.WriteState.html
index 7137829..4a879bb 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.WriteState.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.WriteState.html
@@ -78,124 +78,124 @@
 <span class="sourceLineNo">070</span>import java.util.concurrent.locks.ReadWriteLock;<a name="line.70"></a>
 <span class="sourceLineNo">071</span>import java.util.concurrent.locks.ReentrantReadWriteLock;<a name="line.71"></a>
 <span class="sourceLineNo">072</span>import java.util.function.Function;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.commons.collections.CollectionUtils;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.conf.Configuration;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.fs.FileStatus;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.fs.FileSystem;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.fs.LocatedFileStatus;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.fs.Path;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.Cell;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.CellBuilderType;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.CellComparatorImpl;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.CompoundConfiguration;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.HConstants;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.HConstants.OperationStatusCode;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.92"></a>
-<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.93"></a>
-<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.NamespaceDescriptor;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.97"></a>
-<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.98"></a>
-<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.TableName;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.Tag;<a name="line.100"></a>
-<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.TagUtil;<a name="line.101"></a>
-<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.102"></a>
-<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.client.Append;<a name="line.103"></a>
-<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;<a name="line.104"></a>
-<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.client.CompactionState;<a name="line.105"></a>
-<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.106"></a>
-<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.107"></a>
-<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.client.Get;<a name="line.108"></a>
-<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.109"></a>
-<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.client.IsolationLevel;<a name="line.110"></a>
-<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.111"></a>
-<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;<a name="line.112"></a>
-<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.client.Put;<a name="line.113"></a>
-<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.client.RegionInfoBuilder;<a name="line.115"></a>
-<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.client.Result;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.118"></a>
-<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.119"></a>
-<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.120"></a>
-<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.client.TableDescriptorBuilder;<a name="line.121"></a>
-<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.conf.ConfigurationManager;<a name="line.122"></a>
-<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;<a name="line.123"></a>
-<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;<a name="line.124"></a>
-<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;<a name="line.125"></a>
-<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.126"></a>
-<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.exceptions.TimeoutIOException;<a name="line.127"></a>
-<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.128"></a>
-<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.129"></a>
-<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.filter.FilterWrapper;<a name="line.130"></a>
-<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.filter.IncompatibleFilterException;<a name="line.131"></a>
-<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.132"></a>
-<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.133"></a>
-<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.io.hfile.HFile;<a name="line.135"></a>
-<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.ipc.RpcCall;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.hadoop.hbase.regionserver.throttle.StoreHotnessProtector;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.apache.hadoop.hbase.regionserver.wal.WALUtil;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>import org.apache.hadoop.hbase.security.User;<a name="line.153"></a>
-<span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.154"></a>
-<span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>import org.apache.hadoop.hbase.trace.TraceUtil;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.158"></a>
-<span class="sourceLineNo">159</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>import org.apache.hadoop.hbase.util.CompressionTest;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>import org.apache.hadoop.hbase.util.EncryptionTest;<a name="line.161"></a>
-<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.162"></a>
-<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.util.HashedBytes;<a name="line.164"></a>
-<span class="sourceLineNo">165</span>import org.apache.hadoop.hbase.util.NonceKey;<a name="line.165"></a>
-<span class="sourceLineNo">166</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.166"></a>
-<span class="sourceLineNo">167</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.167"></a>
-<span class="sourceLineNo">168</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.168"></a>
-<span class="sourceLineNo">169</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.169"></a>
-<span class="sourceLineNo">170</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.170"></a>
-<span class="sourceLineNo">171</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.171"></a>
-<span class="sourceLineNo">172</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.172"></a>
-<span class="sourceLineNo">173</span>import org.apache.hadoop.hbase.wal.WALKeyImpl;<a name="line.173"></a>
-<span class="sourceLineNo">174</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.174"></a>
-<span class="sourceLineNo">175</span>import org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;<a name="line.175"></a>
-<span class="sourceLineNo">176</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.176"></a>
-<span class="sourceLineNo">177</span>import org.apache.hadoop.util.StringUtils;<a name="line.177"></a>
-<span class="sourceLineNo">178</span>import org.apache.htrace.core.TraceScope;<a name="line.178"></a>
-<span class="sourceLineNo">179</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.179"></a>
-<span class="sourceLineNo">180</span>import org.slf4j.Logger;<a name="line.180"></a>
-<span class="sourceLineNo">181</span>import org.slf4j.LoggerFactory;<a name="line.181"></a>
-<span class="sourceLineNo">182</span><a name="line.182"></a>
-<span class="sourceLineNo">183</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.183"></a>
-<span class="sourceLineNo">184</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.184"></a>
-<span class="sourceLineNo">185</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.185"></a>
-<span class="sourceLineNo">186</span>import org.apache.hbase.thirdparty.com.google.common.collect.Maps;<a name="line.186"></a>
-<span class="sourceLineNo">187</span>import org.apache.hbase.thirdparty.com.google.common.io.Closeables;<a name="line.187"></a>
-<span class="sourceLineNo">188</span>import org.apache.hbase.thirdparty.com.google.protobuf.Service;<a name="line.188"></a>
-<span class="sourceLineNo">189</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.189"></a>
-<span class="sourceLineNo">190</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.190"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.conf.Configuration;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.fs.FileStatus;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.fs.FileSystem;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.fs.LocatedFileStatus;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.fs.Path;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.Cell;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.CellBuilderType;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.CellComparatorImpl;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.CompoundConfiguration;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.HConstants;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.HConstants.OperationStatusCode;<a name="line.90"></a>
+<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.NamespaceDescriptor;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.97"></a>
+<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.TableName;<a name="line.98"></a>
+<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.Tag;<a name="line.99"></a>
+<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.TagUtil;<a name="line.100"></a>
+<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.101"></a>
+<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.client.Append;<a name="line.102"></a>
+<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;<a name="line.103"></a>
+<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.client.CompactionState;<a name="line.104"></a>
+<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.105"></a>
+<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.106"></a>
+<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.client.Get;<a name="line.107"></a>
+<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.108"></a>
+<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.client.IsolationLevel;<a name="line.109"></a>
+<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.110"></a>
+<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;<a name="line.111"></a>
+<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.client.Put;<a name="line.112"></a>
+<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.113"></a>
+<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.client.RegionInfoBuilder;<a name="line.114"></a>
+<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.115"></a>
+<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.client.Result;<a name="line.116"></a>
+<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.117"></a>
+<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.118"></a>
+<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.119"></a>
+<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.client.TableDescriptorBuilder;<a name="line.120"></a>
+<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.conf.ConfigurationManager;<a name="line.121"></a>
+<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;<a name="line.122"></a>
+<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;<a name="line.124"></a>
+<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.exceptions.TimeoutIOException;<a name="line.126"></a>
+<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.127"></a>
+<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.128"></a>
+<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.filter.FilterWrapper;<a name="line.129"></a>
+<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.filter.IncompatibleFilterException;<a name="line.130"></a>
+<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.131"></a>
+<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.132"></a>
+<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.133"></a>
+<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.io.hfile.HFile;<a name="line.134"></a>
+<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;<a name="line.135"></a>
+<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;<a name="line.136"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.ipc.RpcCall;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.apache.hadoop.hbase.regionserver.throttle.StoreHotnessProtector;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>import org.apache.hadoop.hbase.regionserver.wal.WALUtil;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hadoop.hbase.security.User;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.trace.TraceUtil;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.156"></a>
+<span class="sourceLineNo">157</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.157"></a>
+<span class="sourceLineNo">158</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>import org.apache.hadoop.hbase.util.CompressionTest;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>import org.apache.hadoop.hbase.util.EncryptionTest;<a name="line.160"></a>
+<span class="sourceLineNo">161</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.161"></a>
+<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.162"></a>
+<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.util.HashedBytes;<a name="line.163"></a>
+<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.util.NonceKey;<a name="line.164"></a>
+<span class="sourceLineNo">165</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.165"></a>
+<span class="sourceLineNo">166</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.166"></a>
+<span class="sourceLineNo">167</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.167"></a>
+<span class="sourceLineNo">168</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.168"></a>
+<span class="sourceLineNo">169</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.169"></a>
+<span class="sourceLineNo">170</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.170"></a>
+<span class="sourceLineNo">171</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.171"></a>
+<span class="sourceLineNo">172</span>import org.apache.hadoop.hbase.wal.WALKeyImpl;<a name="line.172"></a>
+<span class="sourceLineNo">173</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.173"></a>
+<span class="sourceLineNo">174</span>import org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;<a name="line.174"></a>
+<span class="sourceLineNo">175</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.175"></a>
+<span class="sourceLineNo">176</span>import org.apache.hadoop.util.StringUtils;<a name="line.176"></a>
+<span class="sourceLineNo">177</span>import org.apache.htrace.core.TraceScope;<a name="line.177"></a>
+<span class="sourceLineNo">178</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.178"></a>
+<span class="sourceLineNo">179</span>import org.slf4j.Logger;<a name="line.179"></a>
+<span class="sourceLineNo">180</span>import org.slf4j.LoggerFactory;<a name="line.180"></a>
+<span class="sourceLineNo">181</span><a name="line.181"></a>
+<span class="sourceLineNo">182</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.182"></a>
+<span class="sourceLineNo">183</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.183"></a>
+<span class="sourceLineNo">184</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.184"></a>
+<span class="sourceLineNo">185</span>import org.apache.hbase.thirdparty.com.google.common.collect.Maps;<a name="line.185"></a>
+<span class="sourceLineNo">186</span>import org.apache.hbase.thirdparty.com.google.common.io.Closeables;<a name="line.186"></a>
+<span class="sourceLineNo">187</span>import org.apache.hbase.thirdparty.com.google.protobuf.Service;<a name="line.187"></a>
+<span class="sourceLineNo">188</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.188"></a>
+<span class="sourceLineNo">189</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.189"></a>
+<span class="sourceLineNo">190</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.190"></a>
 <span class="sourceLineNo">191</span><a name="line.191"></a>
 <span class="sourceLineNo">192</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.192"></a>
 <span class="sourceLineNo">193</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;<a name="line.193"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.html
index 7137829..4a879bb 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.html
@@ -78,124 +78,124 @@
 <span class="sourceLineNo">070</span>import java.util.concurrent.locks.ReadWriteLock;<a name="line.70"></a>
 <span class="sourceLineNo">071</span>import java.util.concurrent.locks.ReentrantReadWriteLock;<a name="line.71"></a>
 <span class="sourceLineNo">072</span>import java.util.function.Function;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.commons.collections.CollectionUtils;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.conf.Configuration;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.fs.FileStatus;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.fs.FileSystem;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.fs.LocatedFileStatus;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.fs.Path;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.Cell;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.CellBuilderType;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.CellComparatorImpl;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.CompoundConfiguration;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.HConstants;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.HConstants.OperationStatusCode;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.92"></a>
-<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.93"></a>
-<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.NamespaceDescriptor;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.97"></a>
-<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.98"></a>
-<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.TableName;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.Tag;<a name="line.100"></a>
-<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.TagUtil;<a name="line.101"></a>
-<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.102"></a>
-<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.client.Append;<a name="line.103"></a>
-<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;<a name="line.104"></a>
-<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.client.CompactionState;<a name="line.105"></a>
-<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.106"></a>
-<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.107"></a>
-<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.client.Get;<a name="line.108"></a>
-<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.109"></a>
-<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.client.IsolationLevel;<a name="line.110"></a>
-<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.111"></a>
-<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;<a name="line.112"></a>
-<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.client.Put;<a name="line.113"></a>
-<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.client.RegionInfoBuilder;<a name="line.115"></a>
-<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.client.Result;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.118"></a>
-<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.119"></a>
-<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.120"></a>
-<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.client.TableDescriptorBuilder;<a name="line.121"></a>
-<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.conf.ConfigurationManager;<a name="line.122"></a>
-<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;<a name="line.123"></a>
-<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;<a name="line.124"></a>
-<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;<a name="line.125"></a>
-<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.126"></a>
-<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.exceptions.TimeoutIOException;<a name="line.127"></a>
-<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.128"></a>
-<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.129"></a>
-<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.filter.FilterWrapper;<a name="line.130"></a>
-<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.filter.IncompatibleFilterException;<a name="line.131"></a>
-<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.132"></a>
-<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.133"></a>
-<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.io.hfile.HFile;<a name="line.135"></a>
-<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.ipc.RpcCall;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.hadoop.hbase.regionserver.throttle.StoreHotnessProtector;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.apache.hadoop.hbase.regionserver.wal.WALUtil;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>import org.apache.hadoop.hbase.security.User;<a name="line.153"></a>
-<span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.154"></a>
-<span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>import org.apache.hadoop.hbase.trace.TraceUtil;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.158"></a>
-<span class="sourceLineNo">159</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>import org.apache.hadoop.hbase.util.CompressionTest;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>import org.apache.hadoop.hbase.util.EncryptionTest;<a name="line.161"></a>
-<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.162"></a>
-<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.util.HashedBytes;<a name="line.164"></a>
-<span class="sourceLineNo">165</span>import org.apache.hadoop.hbase.util.NonceKey;<a name="line.165"></a>
-<span class="sourceLineNo">166</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.166"></a>
-<span class="sourceLineNo">167</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.167"></a>
-<span class="sourceLineNo">168</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.168"></a>
-<span class="sourceLineNo">169</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.169"></a>
-<span class="sourceLineNo">170</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.170"></a>
-<span class="sourceLineNo">171</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.171"></a>
-<span class="sourceLineNo">172</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.172"></a>
-<span class="sourceLineNo">173</span>import org.apache.hadoop.hbase.wal.WALKeyImpl;<a name="line.173"></a>
-<span class="sourceLineNo">174</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.174"></a>
-<span class="sourceLineNo">175</span>import org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;<a name="line.175"></a>
-<span class="sourceLineNo">176</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.176"></a>
-<span class="sourceLineNo">177</span>import org.apache.hadoop.util.StringUtils;<a name="line.177"></a>
-<span class="sourceLineNo">178</span>import org.apache.htrace.core.TraceScope;<a name="line.178"></a>
-<span class="sourceLineNo">179</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.179"></a>
-<span class="sourceLineNo">180</span>import org.slf4j.Logger;<a name="line.180"></a>
-<span class="sourceLineNo">181</span>import org.slf4j.LoggerFactory;<a name="line.181"></a>
-<span class="sourceLineNo">182</span><a name="line.182"></a>
-<span class="sourceLineNo">183</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.183"></a>
-<span class="sourceLineNo">184</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.184"></a>
-<span class="sourceLineNo">185</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.185"></a>
-<span class="sourceLineNo">186</span>import org.apache.hbase.thirdparty.com.google.common.collect.Maps;<a name="line.186"></a>
-<span class="sourceLineNo">187</span>import org.apache.hbase.thirdparty.com.google.common.io.Closeables;<a name="line.187"></a>
-<span class="sourceLineNo">188</span>import org.apache.hbase.thirdparty.com.google.protobuf.Service;<a name="line.188"></a>
-<span class="sourceLineNo">189</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.189"></a>
-<span class="sourceLineNo">190</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.190"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.conf.Configuration;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.fs.FileStatus;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.fs.FileSystem;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.fs.LocatedFileStatus;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.fs.Path;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.Cell;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.CellBuilderType;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.CellComparatorImpl;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.CompoundConfiguration;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.HConstants;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.HConstants.OperationStatusCode;<a name="line.90"></a>
+<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.NamespaceDescriptor;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.97"></a>
+<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.TableName;<a name="line.98"></a>
+<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.Tag;<a name="line.99"></a>
+<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.TagUtil;<a name="line.100"></a>
+<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.101"></a>
+<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.client.Append;<a name="line.102"></a>
+<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;<a name="line.103"></a>
+<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.client.CompactionState;<a name="line.104"></a>
+<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.105"></a>
+<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.106"></a>
+<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.client.Get;<a name="line.107"></a>
+<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.108"></a>
+<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.client.IsolationLevel;<a name="line.109"></a>
+<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.110"></a>
+<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;<a name="line.111"></a>
+<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.client.Put;<a name="line.112"></a>
+<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.113"></a>
+<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.client.RegionInfoBuilder;<a name="line.114"></a>
+<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.115"></a>
+<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.client.Result;<a name="line.116"></a>
+<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.117"></a>
+<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.118"></a>
+<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.119"></a>
+<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.client.TableDescriptorBuilder;<a name="line.120"></a>
+<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.conf.ConfigurationManager;<a name="line.121"></a>
+<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;<a name="line.122"></a>
+<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;<a name="line.124"></a>
+<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.exceptions.TimeoutIOException;<a name="line.126"></a>
+<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.127"></a>
+<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.128"></a>
+<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.filter.FilterWrapper;<a name="line.129"></a>
+<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.filter.IncompatibleFilterException;<a name="line.130"></a>
+<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.131"></a>
+<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.132"></a>
+<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.133"></a>
+<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.io.hfile.HFile;<a name="line.134"></a>
+<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;<a name="line.135"></a>
+<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;<a name="line.136"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.ipc.RpcCall;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.apache.hadoop.hbase.regionserver.throttle.StoreHotnessProtector;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>import org.apache.hadoop.hbase.regionserver.wal.WALUtil;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hadoop.hbase.security.User;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.trace.TraceUtil;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.156"></a>
+<span class="sourceLineNo">157</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.157"></a>
+<span class="sourceLineNo">158</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>import org.apache.hadoop.hbase.util.CompressionTest;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>import org.apache.hadoop.hbase.util.EncryptionTest;<a name="line.160"></a>
+<span class="sourceLineNo">161</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.161"></a>
+<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.162"></a>
+<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.util.HashedBytes;<a name="line.163"></a>
+<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.util.NonceKey;<a name="line.164"></a>
+<span class="sourceLineNo">165</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.165"></a>
+<span class="sourceLineNo">166</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.166"></a>
+<span class="sourceLineNo">167</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.167"></a>
+<span class="sourceLineNo">168</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.168"></a>
+<span class="sourceLineNo">169</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.169"></a>
+<span class="sourceLineNo">170</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.170"></a>
+<span class="sourceLineNo">171</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.171"></a>
+<span class="sourceLineNo">172</span>import org.apache.hadoop.hbase.wal.WALKeyImpl;<a name="line.172"></a>
+<span class="sourceLineNo">173</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.173"></a>
+<span class="sourceLineNo">174</span>import org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;<a name="line.174"></a>
+<span class="sourceLineNo">175</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.175"></a>
+<span class="sourceLineNo">176</span>import org.apache.hadoop.util.StringUtils;<a name="line.176"></a>
+<span class="sourceLineNo">177</span>import org.apache.htrace.core.TraceScope;<a name="line.177"></a>
+<span class="sourceLineNo">178</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.178"></a>
+<span class="sourceLineNo">179</span>import org.slf4j.Logger;<a name="line.179"></a>
+<span class="sourceLineNo">180</span>import org.slf4j.LoggerFactory;<a name="line.180"></a>
+<span class="sourceLineNo">181</span><a name="line.181"></a>
+<span class="sourceLineNo">182</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.182"></a>
+<span class="sourceLineNo">183</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.183"></a>
+<span class="sourceLineNo">184</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.184"></a>
+<span class="sourceLineNo">185</span>import org.apache.hbase.thirdparty.com.google.common.collect.Maps;<a name="line.185"></a>
+<span class="sourceLineNo">186</span>import org.apache.hbase.thirdparty.com.google.common.io.Closeables;<a name="line.186"></a>
+<span class="sourceLineNo">187</span>import org.apache.hbase.thirdparty.com.google.protobuf.Service;<a name="line.187"></a>
+<span class="sourceLineNo">188</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.188"></a>
+<span class="sourceLineNo">189</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.189"></a>
+<span class="sourceLineNo">190</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.190"></a>
 <span class="sourceLineNo">191</span><a name="line.191"></a>
 <span class="sourceLineNo">192</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.192"></a>
 <span class="sourceLineNo">193</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;<a name="line.193"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.LogDelegate.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.LogDelegate.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.LogDelegate.html
index 7d1dba6..11f9915 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.LogDelegate.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.LogDelegate.html
@@ -50,114 +50,114 @@
 <span class="sourceLineNo">042</span>import java.util.concurrent.atomic.AtomicBoolean;<a name="line.42"></a>
 <span class="sourceLineNo">043</span>import java.util.concurrent.atomic.AtomicLong;<a name="line.43"></a>
 <span class="sourceLineNo">044</span>import java.util.concurrent.atomic.LongAdder;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.commons.collections.CollectionUtils;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.commons.lang3.mutable.MutableObject;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.conf.Configuration;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.fs.Path;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.ByteBufferExtendedCell;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.CacheEvictionStats;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.CacheEvictionStatsBuilder;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.Cell;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.CellScannable;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.HBaseIOException;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.HConstants;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.MultiActionResultTooLarge;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.Server;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.ServerName;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.TableName;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.client.Append;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.client.ConnectionUtils;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.client.Get;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.client.Put;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.client.Result;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.client.Row;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.client.VersionInfoUtil;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.conf.ConfigurationObserver;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.exceptions.ScannerResetException;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler;<a name="line.92"></a>
-<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.ipc.HBaseRpcController;<a name="line.93"></a>
-<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.ipc.PriorityFunction;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.ipc.QosPriority;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.ipc.RpcCallContext;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.ipc.RpcCallback;<a name="line.97"></a>
-<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.98"></a>
-<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.ipc.RpcServerFactory;<a name="line.100"></a>
-<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.ipc.RpcServerInterface;<a name="line.101"></a>
-<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;<a name="line.102"></a>
-<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.ipc.ServerRpcController;<a name="line.103"></a>
-<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.log.HBaseMarkers;<a name="line.104"></a>
-<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.master.MasterRpcServices;<a name="line.105"></a>
-<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.net.Address;<a name="line.106"></a>
-<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.procedure2.RSProcedureCallable;<a name="line.107"></a>
-<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.quotas.ActivePolicyEnforcement;<a name="line.108"></a>
-<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.quotas.OperationQuota;<a name="line.109"></a>
-<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.quotas.QuotaUtil;<a name="line.110"></a>
-<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.quotas.RegionServerRpcQuotaManager;<a name="line.111"></a>
-<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.112"></a>
-<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot;<a name="line.113"></a>
-<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.quotas.SpaceViolationPolicyEnforcement;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.regionserver.HRegion.RegionScannerImpl;<a name="line.115"></a>
-<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.regionserver.Leases.Lease;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.regionserver.Leases.LeaseStillHeldException;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.regionserver.Region.Operation;<a name="line.118"></a>
-<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.119"></a>
-<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.120"></a>
-<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.regionserver.handler.OpenMetaHandler;<a name="line.121"></a>
-<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.regionserver.handler.OpenPriorityRegionHandler;<a name="line.122"></a>
-<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.regionserver.handler.OpenRegionHandler;<a name="line.123"></a>
-<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.security.Superusers;<a name="line.124"></a>
-<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.security.User;<a name="line.125"></a>
-<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.security.access.AccessChecker;<a name="line.126"></a>
-<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.security.access.Permission;<a name="line.127"></a>
-<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.128"></a>
-<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.util.DNS;<a name="line.129"></a>
-<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.130"></a>
-<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.131"></a>
-<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.132"></a>
-<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.util.Strings;<a name="line.133"></a>
-<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.135"></a>
-<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.slf4j.Logger;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.slf4j.LoggerFactory;<a name="line.141"></a>
-<span class="sourceLineNo">142</span><a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hbase.thirdparty.com.google.common.cache.Cache;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hbase.thirdparty.com.google.common.cache.CacheBuilder;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hbase.thirdparty.com.google.protobuf.Message;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.152"></a>
+<span class="sourceLineNo">045</span>import org.apache.commons.lang3.mutable.MutableObject;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.conf.Configuration;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.fs.Path;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.ByteBufferExtendedCell;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.CacheEvictionStats;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.CacheEvictionStatsBuilder;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.Cell;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.CellScannable;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.HBaseIOException;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.HConstants;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.MultiActionResultTooLarge;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.Server;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.ServerName;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.TableName;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.client.Append;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.client.ConnectionUtils;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.client.Get;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.client.Put;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.client.Result;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.client.Row;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.client.VersionInfoUtil;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.conf.ConfigurationObserver;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.exceptions.ScannerResetException;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.90"></a>
+<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler;<a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.ipc.HBaseRpcController;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.ipc.PriorityFunction;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.ipc.QosPriority;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.ipc.RpcCallContext;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.ipc.RpcCallback;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.97"></a>
+<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface;<a name="line.98"></a>
+<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.ipc.RpcServerFactory;<a name="line.99"></a>
+<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.ipc.RpcServerInterface;<a name="line.100"></a>
+<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;<a name="line.101"></a>
+<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.ipc.ServerRpcController;<a name="line.102"></a>
+<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.log.HBaseMarkers;<a name="line.103"></a>
+<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.master.MasterRpcServices;<a name="line.104"></a>
+<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.net.Address;<a name="line.105"></a>
+<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.procedure2.RSProcedureCallable;<a name="line.106"></a>
+<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.quotas.ActivePolicyEnforcement;<a name="line.107"></a>
+<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.quotas.OperationQuota;<a name="line.108"></a>
+<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.quotas.QuotaUtil;<a name="line.109"></a>
+<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.quotas.RegionServerRpcQuotaManager;<a name="line.110"></a>
+<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.111"></a>
+<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot;<a name="line.112"></a>
+<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.quotas.SpaceViolationPolicyEnforcement;<a name="line.113"></a>
+<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.regionserver.HRegion.RegionScannerImpl;<a name="line.114"></a>
+<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.regionserver.Leases.Lease;<a name="line.115"></a>
+<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.regionserver.Leases.LeaseStillHeldException;<a name="line.116"></a>
+<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.regionserver.Region.Operation;<a name="line.117"></a>
+<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.118"></a>
+<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.119"></a>
+<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.regionserver.handler.OpenMetaHandler;<a name="line.120"></a>
+<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.regionserver.handler.OpenPriorityRegionHandler;<a name="line.121"></a>
+<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.regionserver.handler.OpenRegionHandler;<a name="line.122"></a>
+<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.security.Superusers;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.security.User;<a name="line.124"></a>
+<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.security.access.AccessChecker;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.security.access.Permission;<a name="line.126"></a>
+<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.127"></a>
+<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.util.DNS;<a name="line.128"></a>
+<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.129"></a>
+<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.130"></a>
+<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.131"></a>
+<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.util.Strings;<a name="line.132"></a>
+<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.133"></a>
+<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.134"></a>
+<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.135"></a>
+<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.136"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.slf4j.Logger;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.slf4j.LoggerFactory;<a name="line.140"></a>
+<span class="sourceLineNo">141</span><a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hbase.thirdparty.com.google.common.cache.Cache;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hbase.thirdparty.com.google.common.cache.CacheBuilder;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.hbase.thirdparty.com.google.protobuf.Message;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.152"></a>
 <span class="sourceLineNo">153</span><a name="line.153"></a>
 <span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.154"></a>
 <span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;<a name="line.155"></a>


[12/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ObservedExceptionsInBatch.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ObservedExceptionsInBatch.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ObservedExceptionsInBatch.html
index 7137829..4a879bb 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ObservedExceptionsInBatch.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ObservedExceptionsInBatch.html
@@ -78,124 +78,124 @@
 <span class="sourceLineNo">070</span>import java.util.concurrent.locks.ReadWriteLock;<a name="line.70"></a>
 <span class="sourceLineNo">071</span>import java.util.concurrent.locks.ReentrantReadWriteLock;<a name="line.71"></a>
 <span class="sourceLineNo">072</span>import java.util.function.Function;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.commons.collections.CollectionUtils;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.conf.Configuration;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.fs.FileStatus;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.fs.FileSystem;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.fs.LocatedFileStatus;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.fs.Path;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.Cell;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.CellBuilderType;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.CellComparatorImpl;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.CompoundConfiguration;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.HConstants;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.HConstants.OperationStatusCode;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.92"></a>
-<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.93"></a>
-<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.NamespaceDescriptor;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.97"></a>
-<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.98"></a>
-<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.TableName;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.Tag;<a name="line.100"></a>
-<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.TagUtil;<a name="line.101"></a>
-<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.102"></a>
-<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.client.Append;<a name="line.103"></a>
-<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;<a name="line.104"></a>
-<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.client.CompactionState;<a name="line.105"></a>
-<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.106"></a>
-<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.107"></a>
-<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.client.Get;<a name="line.108"></a>
-<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.109"></a>
-<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.client.IsolationLevel;<a name="line.110"></a>
-<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.111"></a>
-<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;<a name="line.112"></a>
-<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.client.Put;<a name="line.113"></a>
-<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.client.RegionInfoBuilder;<a name="line.115"></a>
-<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.client.Result;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.118"></a>
-<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.119"></a>
-<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.120"></a>
-<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.client.TableDescriptorBuilder;<a name="line.121"></a>
-<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.conf.ConfigurationManager;<a name="line.122"></a>
-<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;<a name="line.123"></a>
-<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;<a name="line.124"></a>
-<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;<a name="line.125"></a>
-<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.126"></a>
-<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.exceptions.TimeoutIOException;<a name="line.127"></a>
-<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.128"></a>
-<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.129"></a>
-<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.filter.FilterWrapper;<a name="line.130"></a>
-<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.filter.IncompatibleFilterException;<a name="line.131"></a>
-<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.132"></a>
-<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.133"></a>
-<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.io.hfile.HFile;<a name="line.135"></a>
-<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.ipc.RpcCall;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.hadoop.hbase.regionserver.throttle.StoreHotnessProtector;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.apache.hadoop.hbase.regionserver.wal.WALUtil;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>import org.apache.hadoop.hbase.security.User;<a name="line.153"></a>
-<span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.154"></a>
-<span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>import org.apache.hadoop.hbase.trace.TraceUtil;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.158"></a>
-<span class="sourceLineNo">159</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>import org.apache.hadoop.hbase.util.CompressionTest;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>import org.apache.hadoop.hbase.util.EncryptionTest;<a name="line.161"></a>
-<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.162"></a>
-<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.util.HashedBytes;<a name="line.164"></a>
-<span class="sourceLineNo">165</span>import org.apache.hadoop.hbase.util.NonceKey;<a name="line.165"></a>
-<span class="sourceLineNo">166</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.166"></a>
-<span class="sourceLineNo">167</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.167"></a>
-<span class="sourceLineNo">168</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.168"></a>
-<span class="sourceLineNo">169</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.169"></a>
-<span class="sourceLineNo">170</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.170"></a>
-<span class="sourceLineNo">171</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.171"></a>
-<span class="sourceLineNo">172</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.172"></a>
-<span class="sourceLineNo">173</span>import org.apache.hadoop.hbase.wal.WALKeyImpl;<a name="line.173"></a>
-<span class="sourceLineNo">174</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.174"></a>
-<span class="sourceLineNo">175</span>import org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;<a name="line.175"></a>
-<span class="sourceLineNo">176</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.176"></a>
-<span class="sourceLineNo">177</span>import org.apache.hadoop.util.StringUtils;<a name="line.177"></a>
-<span class="sourceLineNo">178</span>import org.apache.htrace.core.TraceScope;<a name="line.178"></a>
-<span class="sourceLineNo">179</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.179"></a>
-<span class="sourceLineNo">180</span>import org.slf4j.Logger;<a name="line.180"></a>
-<span class="sourceLineNo">181</span>import org.slf4j.LoggerFactory;<a name="line.181"></a>
-<span class="sourceLineNo">182</span><a name="line.182"></a>
-<span class="sourceLineNo">183</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.183"></a>
-<span class="sourceLineNo">184</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.184"></a>
-<span class="sourceLineNo">185</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.185"></a>
-<span class="sourceLineNo">186</span>import org.apache.hbase.thirdparty.com.google.common.collect.Maps;<a name="line.186"></a>
-<span class="sourceLineNo">187</span>import org.apache.hbase.thirdparty.com.google.common.io.Closeables;<a name="line.187"></a>
-<span class="sourceLineNo">188</span>import org.apache.hbase.thirdparty.com.google.protobuf.Service;<a name="line.188"></a>
-<span class="sourceLineNo">189</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.189"></a>
-<span class="sourceLineNo">190</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.190"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.conf.Configuration;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.fs.FileStatus;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.fs.FileSystem;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.fs.LocatedFileStatus;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.fs.Path;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.Cell;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.CellBuilderType;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.CellComparatorImpl;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.CompoundConfiguration;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.HConstants;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.HConstants.OperationStatusCode;<a name="line.90"></a>
+<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.NamespaceDescriptor;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.97"></a>
+<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.TableName;<a name="line.98"></a>
+<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.Tag;<a name="line.99"></a>
+<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.TagUtil;<a name="line.100"></a>
+<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.101"></a>
+<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.client.Append;<a name="line.102"></a>
+<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;<a name="line.103"></a>
+<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.client.CompactionState;<a name="line.104"></a>
+<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.105"></a>
+<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.106"></a>
+<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.client.Get;<a name="line.107"></a>
+<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.108"></a>
+<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.client.IsolationLevel;<a name="line.109"></a>
+<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.110"></a>
+<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;<a name="line.111"></a>
+<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.client.Put;<a name="line.112"></a>
+<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.113"></a>
+<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.client.RegionInfoBuilder;<a name="line.114"></a>
+<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.115"></a>
+<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.client.Result;<a name="line.116"></a>
+<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.117"></a>
+<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.118"></a>
+<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.119"></a>
+<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.client.TableDescriptorBuilder;<a name="line.120"></a>
+<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.conf.ConfigurationManager;<a name="line.121"></a>
+<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;<a name="line.122"></a>
+<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;<a name="line.124"></a>
+<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.exceptions.TimeoutIOException;<a name="line.126"></a>
+<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.127"></a>
+<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.128"></a>
+<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.filter.FilterWrapper;<a name="line.129"></a>
+<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.filter.IncompatibleFilterException;<a name="line.130"></a>
+<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.131"></a>
+<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.132"></a>
+<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.133"></a>
+<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.io.hfile.HFile;<a name="line.134"></a>
+<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;<a name="line.135"></a>
+<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;<a name="line.136"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.ipc.RpcCall;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.apache.hadoop.hbase.regionserver.throttle.StoreHotnessProtector;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>import org.apache.hadoop.hbase.regionserver.wal.WALUtil;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hadoop.hbase.security.User;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.trace.TraceUtil;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.156"></a>
+<span class="sourceLineNo">157</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.157"></a>
+<span class="sourceLineNo">158</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>import org.apache.hadoop.hbase.util.CompressionTest;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>import org.apache.hadoop.hbase.util.EncryptionTest;<a name="line.160"></a>
+<span class="sourceLineNo">161</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.161"></a>
+<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.162"></a>
+<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.util.HashedBytes;<a name="line.163"></a>
+<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.util.NonceKey;<a name="line.164"></a>
+<span class="sourceLineNo">165</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.165"></a>
+<span class="sourceLineNo">166</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.166"></a>
+<span class="sourceLineNo">167</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.167"></a>
+<span class="sourceLineNo">168</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.168"></a>
+<span class="sourceLineNo">169</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.169"></a>
+<span class="sourceLineNo">170</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.170"></a>
+<span class="sourceLineNo">171</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.171"></a>
+<span class="sourceLineNo">172</span>import org.apache.hadoop.hbase.wal.WALKeyImpl;<a name="line.172"></a>
+<span class="sourceLineNo">173</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.173"></a>
+<span class="sourceLineNo">174</span>import org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;<a name="line.174"></a>
+<span class="sourceLineNo">175</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.175"></a>
+<span class="sourceLineNo">176</span>import org.apache.hadoop.util.StringUtils;<a name="line.176"></a>
+<span class="sourceLineNo">177</span>import org.apache.htrace.core.TraceScope;<a name="line.177"></a>
+<span class="sourceLineNo">178</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.178"></a>
+<span class="sourceLineNo">179</span>import org.slf4j.Logger;<a name="line.179"></a>
+<span class="sourceLineNo">180</span>import org.slf4j.LoggerFactory;<a name="line.180"></a>
+<span class="sourceLineNo">181</span><a name="line.181"></a>
+<span class="sourceLineNo">182</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.182"></a>
+<span class="sourceLineNo">183</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.183"></a>
+<span class="sourceLineNo">184</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.184"></a>
+<span class="sourceLineNo">185</span>import org.apache.hbase.thirdparty.com.google.common.collect.Maps;<a name="line.185"></a>
+<span class="sourceLineNo">186</span>import org.apache.hbase.thirdparty.com.google.common.io.Closeables;<a name="line.186"></a>
+<span class="sourceLineNo">187</span>import org.apache.hbase.thirdparty.com.google.protobuf.Service;<a name="line.187"></a>
+<span class="sourceLineNo">188</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.188"></a>
+<span class="sourceLineNo">189</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.189"></a>
+<span class="sourceLineNo">190</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.190"></a>
 <span class="sourceLineNo">191</span><a name="line.191"></a>
 <span class="sourceLineNo">192</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.192"></a>
 <span class="sourceLineNo">193</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;<a name="line.193"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.PrepareFlushResult.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.PrepareFlushResult.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.PrepareFlushResult.html
index 7137829..4a879bb 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.PrepareFlushResult.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.PrepareFlushResult.html
@@ -78,124 +78,124 @@
 <span class="sourceLineNo">070</span>import java.util.concurrent.locks.ReadWriteLock;<a name="line.70"></a>
 <span class="sourceLineNo">071</span>import java.util.concurrent.locks.ReentrantReadWriteLock;<a name="line.71"></a>
 <span class="sourceLineNo">072</span>import java.util.function.Function;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.commons.collections.CollectionUtils;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.conf.Configuration;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.fs.FileStatus;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.fs.FileSystem;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.fs.LocatedFileStatus;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.fs.Path;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.Cell;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.CellBuilderType;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.CellComparatorImpl;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.CompoundConfiguration;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.HConstants;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.HConstants.OperationStatusCode;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.92"></a>
-<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.93"></a>
-<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.NamespaceDescriptor;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.97"></a>
-<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.98"></a>
-<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.TableName;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.Tag;<a name="line.100"></a>
-<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.TagUtil;<a name="line.101"></a>
-<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.102"></a>
-<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.client.Append;<a name="line.103"></a>
-<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;<a name="line.104"></a>
-<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.client.CompactionState;<a name="line.105"></a>
-<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.106"></a>
-<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.107"></a>
-<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.client.Get;<a name="line.108"></a>
-<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.109"></a>
-<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.client.IsolationLevel;<a name="line.110"></a>
-<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.111"></a>
-<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;<a name="line.112"></a>
-<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.client.Put;<a name="line.113"></a>
-<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.client.RegionInfoBuilder;<a name="line.115"></a>
-<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.client.Result;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.118"></a>
-<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.119"></a>
-<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.120"></a>
-<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.client.TableDescriptorBuilder;<a name="line.121"></a>
-<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.conf.ConfigurationManager;<a name="line.122"></a>
-<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;<a name="line.123"></a>
-<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;<a name="line.124"></a>
-<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;<a name="line.125"></a>
-<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.126"></a>
-<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.exceptions.TimeoutIOException;<a name="line.127"></a>
-<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.128"></a>
-<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.129"></a>
-<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.filter.FilterWrapper;<a name="line.130"></a>
-<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.filter.IncompatibleFilterException;<a name="line.131"></a>
-<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.132"></a>
-<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.133"></a>
-<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.io.hfile.HFile;<a name="line.135"></a>
-<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.ipc.RpcCall;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.hadoop.hbase.regionserver.throttle.StoreHotnessProtector;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.apache.hadoop.hbase.regionserver.wal.WALUtil;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>import org.apache.hadoop.hbase.security.User;<a name="line.153"></a>
-<span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.154"></a>
-<span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>import org.apache.hadoop.hbase.trace.TraceUtil;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.158"></a>
-<span class="sourceLineNo">159</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>import org.apache.hadoop.hbase.util.CompressionTest;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>import org.apache.hadoop.hbase.util.EncryptionTest;<a name="line.161"></a>
-<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.162"></a>
-<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.util.HashedBytes;<a name="line.164"></a>
-<span class="sourceLineNo">165</span>import org.apache.hadoop.hbase.util.NonceKey;<a name="line.165"></a>
-<span class="sourceLineNo">166</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.166"></a>
-<span class="sourceLineNo">167</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.167"></a>
-<span class="sourceLineNo">168</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.168"></a>
-<span class="sourceLineNo">169</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.169"></a>
-<span class="sourceLineNo">170</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.170"></a>
-<span class="sourceLineNo">171</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.171"></a>
-<span class="sourceLineNo">172</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.172"></a>
-<span class="sourceLineNo">173</span>import org.apache.hadoop.hbase.wal.WALKeyImpl;<a name="line.173"></a>
-<span class="sourceLineNo">174</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.174"></a>
-<span class="sourceLineNo">175</span>import org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;<a name="line.175"></a>
-<span class="sourceLineNo">176</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.176"></a>
-<span class="sourceLineNo">177</span>import org.apache.hadoop.util.StringUtils;<a name="line.177"></a>
-<span class="sourceLineNo">178</span>import org.apache.htrace.core.TraceScope;<a name="line.178"></a>
-<span class="sourceLineNo">179</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.179"></a>
-<span class="sourceLineNo">180</span>import org.slf4j.Logger;<a name="line.180"></a>
-<span class="sourceLineNo">181</span>import org.slf4j.LoggerFactory;<a name="line.181"></a>
-<span class="sourceLineNo">182</span><a name="line.182"></a>
-<span class="sourceLineNo">183</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.183"></a>
-<span class="sourceLineNo">184</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.184"></a>
-<span class="sourceLineNo">185</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.185"></a>
-<span class="sourceLineNo">186</span>import org.apache.hbase.thirdparty.com.google.common.collect.Maps;<a name="line.186"></a>
-<span class="sourceLineNo">187</span>import org.apache.hbase.thirdparty.com.google.common.io.Closeables;<a name="line.187"></a>
-<span class="sourceLineNo">188</span>import org.apache.hbase.thirdparty.com.google.protobuf.Service;<a name="line.188"></a>
-<span class="sourceLineNo">189</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.189"></a>
-<span class="sourceLineNo">190</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.190"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.conf.Configuration;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.fs.FileStatus;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.fs.FileSystem;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.fs.LocatedFileStatus;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.fs.Path;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.Cell;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.CellBuilderType;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.CellComparatorImpl;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.CompoundConfiguration;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.HConstants;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.HConstants.OperationStatusCode;<a name="line.90"></a>
+<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.NamespaceDescriptor;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.97"></a>
+<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.TableName;<a name="line.98"></a>
+<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.Tag;<a name="line.99"></a>
+<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.TagUtil;<a name="line.100"></a>
+<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.101"></a>
+<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.client.Append;<a name="line.102"></a>
+<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;<a name="line.103"></a>
+<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.client.CompactionState;<a name="line.104"></a>
+<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.105"></a>
+<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.106"></a>
+<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.client.Get;<a name="line.107"></a>
+<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.108"></a>
+<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.client.IsolationLevel;<a name="line.109"></a>
+<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.110"></a>
+<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;<a name="line.111"></a>
+<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.client.Put;<a name="line.112"></a>
+<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.113"></a>
+<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.client.RegionInfoBuilder;<a name="line.114"></a>
+<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.115"></a>
+<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.client.Result;<a name="line.116"></a>
+<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.117"></a>
+<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.118"></a>
+<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.119"></a>
+<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.client.TableDescriptorBuilder;<a name="line.120"></a>
+<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.conf.ConfigurationManager;<a name="line.121"></a>
+<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;<a name="line.122"></a>
+<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;<a name="line.124"></a>
+<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.exceptions.TimeoutIOException;<a name="line.126"></a>
+<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.127"></a>
+<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.128"></a>
+<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.filter.FilterWrapper;<a name="line.129"></a>
+<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.filter.IncompatibleFilterException;<a name="line.130"></a>
+<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.131"></a>
+<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.132"></a>
+<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.133"></a>
+<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.io.hfile.HFile;<a name="line.134"></a>
+<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;<a name="line.135"></a>
+<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;<a name="line.136"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.ipc.RpcCall;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.apache.hadoop.hbase.regionserver.throttle.StoreHotnessProtector;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>import org.apache.hadoop.hbase.regionserver.wal.WALUtil;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hadoop.hbase.security.User;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.trace.TraceUtil;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.156"></a>
+<span class="sourceLineNo">157</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.157"></a>
+<span class="sourceLineNo">158</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>import org.apache.hadoop.hbase.util.CompressionTest;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>import org.apache.hadoop.hbase.util.EncryptionTest;<a name="line.160"></a>
+<span class="sourceLineNo">161</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.161"></a>
+<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.162"></a>
+<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.util.HashedBytes;<a name="line.163"></a>
+<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.util.NonceKey;<a name="line.164"></a>
+<span class="sourceLineNo">165</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.165"></a>
+<span class="sourceLineNo">166</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.166"></a>
+<span class="sourceLineNo">167</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.167"></a>
+<span class="sourceLineNo">168</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.168"></a>
+<span class="sourceLineNo">169</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.169"></a>
+<span class="sourceLineNo">170</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.170"></a>
+<span class="sourceLineNo">171</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.171"></a>
+<span class="sourceLineNo">172</span>import org.apache.hadoop.hbase.wal.WALKeyImpl;<a name="line.172"></a>
+<span class="sourceLineNo">173</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.173"></a>
+<span class="sourceLineNo">174</span>import org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;<a name="line.174"></a>
+<span class="sourceLineNo">175</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.175"></a>
+<span class="sourceLineNo">176</span>import org.apache.hadoop.util.StringUtils;<a name="line.176"></a>
+<span class="sourceLineNo">177</span>import org.apache.htrace.core.TraceScope;<a name="line.177"></a>
+<span class="sourceLineNo">178</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.178"></a>
+<span class="sourceLineNo">179</span>import org.slf4j.Logger;<a name="line.179"></a>
+<span class="sourceLineNo">180</span>import org.slf4j.LoggerFactory;<a name="line.180"></a>
+<span class="sourceLineNo">181</span><a name="line.181"></a>
+<span class="sourceLineNo">182</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.182"></a>
+<span class="sourceLineNo">183</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.183"></a>
+<span class="sourceLineNo">184</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.184"></a>
+<span class="sourceLineNo">185</span>import org.apache.hbase.thirdparty.com.google.common.collect.Maps;<a name="line.185"></a>
+<span class="sourceLineNo">186</span>import org.apache.hbase.thirdparty.com.google.common.io.Closeables;<a name="line.186"></a>
+<span class="sourceLineNo">187</span>import org.apache.hbase.thirdparty.com.google.protobuf.Service;<a name="line.187"></a>
+<span class="sourceLineNo">188</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.188"></a>
+<span class="sourceLineNo">189</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.189"></a>
+<span class="sourceLineNo">190</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.190"></a>
 <span class="sourceLineNo">191</span><a name="line.191"></a>
 <span class="sourceLineNo">192</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.192"></a>
 <span class="sourceLineNo">193</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;<a name="line.193"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
index 7137829..4a879bb 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RegionScannerImpl.html
@@ -78,124 +78,124 @@
 <span class="sourceLineNo">070</span>import java.util.concurrent.locks.ReadWriteLock;<a name="line.70"></a>
 <span class="sourceLineNo">071</span>import java.util.concurrent.locks.ReentrantReadWriteLock;<a name="line.71"></a>
 <span class="sourceLineNo">072</span>import java.util.function.Function;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.commons.collections.CollectionUtils;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.conf.Configuration;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.fs.FileStatus;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.fs.FileSystem;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.fs.LocatedFileStatus;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.fs.Path;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.Cell;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.CellBuilderType;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.CellComparatorImpl;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.CompoundConfiguration;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.HConstants;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.HConstants.OperationStatusCode;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.92"></a>
-<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.93"></a>
-<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.NamespaceDescriptor;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.97"></a>
-<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.98"></a>
-<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.TableName;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.Tag;<a name="line.100"></a>
-<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.TagUtil;<a name="line.101"></a>
-<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.102"></a>
-<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.client.Append;<a name="line.103"></a>
-<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;<a name="line.104"></a>
-<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.client.CompactionState;<a name="line.105"></a>
-<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.106"></a>
-<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.107"></a>
-<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.client.Get;<a name="line.108"></a>
-<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.109"></a>
-<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.client.IsolationLevel;<a name="line.110"></a>
-<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.111"></a>
-<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;<a name="line.112"></a>
-<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.client.Put;<a name="line.113"></a>
-<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.client.RegionInfoBuilder;<a name="line.115"></a>
-<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.client.Result;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.118"></a>
-<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.119"></a>
-<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.120"></a>
-<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.client.TableDescriptorBuilder;<a name="line.121"></a>
-<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.conf.ConfigurationManager;<a name="line.122"></a>
-<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;<a name="line.123"></a>
-<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;<a name="line.124"></a>
-<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;<a name="line.125"></a>
-<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.126"></a>
-<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.exceptions.TimeoutIOException;<a name="line.127"></a>
-<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.128"></a>
-<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.129"></a>
-<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.filter.FilterWrapper;<a name="line.130"></a>
-<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.filter.IncompatibleFilterException;<a name="line.131"></a>
-<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.132"></a>
-<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.133"></a>
-<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.io.hfile.HFile;<a name="line.135"></a>
-<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.ipc.RpcCall;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.hadoop.hbase.regionserver.throttle.StoreHotnessProtector;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.apache.hadoop.hbase.regionserver.wal.WALUtil;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>import org.apache.hadoop.hbase.security.User;<a name="line.153"></a>
-<span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.154"></a>
-<span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>import org.apache.hadoop.hbase.trace.TraceUtil;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.158"></a>
-<span class="sourceLineNo">159</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>import org.apache.hadoop.hbase.util.CompressionTest;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>import org.apache.hadoop.hbase.util.EncryptionTest;<a name="line.161"></a>
-<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.162"></a>
-<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.util.HashedBytes;<a name="line.164"></a>
-<span class="sourceLineNo">165</span>import org.apache.hadoop.hbase.util.NonceKey;<a name="line.165"></a>
-<span class="sourceLineNo">166</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.166"></a>
-<span class="sourceLineNo">167</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.167"></a>
-<span class="sourceLineNo">168</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.168"></a>
-<span class="sourceLineNo">169</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.169"></a>
-<span class="sourceLineNo">170</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.170"></a>
-<span class="sourceLineNo">171</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.171"></a>
-<span class="sourceLineNo">172</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.172"></a>
-<span class="sourceLineNo">173</span>import org.apache.hadoop.hbase.wal.WALKeyImpl;<a name="line.173"></a>
-<span class="sourceLineNo">174</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.174"></a>
-<span class="sourceLineNo">175</span>import org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;<a name="line.175"></a>
-<span class="sourceLineNo">176</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.176"></a>
-<span class="sourceLineNo">177</span>import org.apache.hadoop.util.StringUtils;<a name="line.177"></a>
-<span class="sourceLineNo">178</span>import org.apache.htrace.core.TraceScope;<a name="line.178"></a>
-<span class="sourceLineNo">179</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.179"></a>
-<span class="sourceLineNo">180</span>import org.slf4j.Logger;<a name="line.180"></a>
-<span class="sourceLineNo">181</span>import org.slf4j.LoggerFactory;<a name="line.181"></a>
-<span class="sourceLineNo">182</span><a name="line.182"></a>
-<span class="sourceLineNo">183</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.183"></a>
-<span class="sourceLineNo">184</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.184"></a>
-<span class="sourceLineNo">185</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.185"></a>
-<span class="sourceLineNo">186</span>import org.apache.hbase.thirdparty.com.google.common.collect.Maps;<a name="line.186"></a>
-<span class="sourceLineNo">187</span>import org.apache.hbase.thirdparty.com.google.common.io.Closeables;<a name="line.187"></a>
-<span class="sourceLineNo">188</span>import org.apache.hbase.thirdparty.com.google.protobuf.Service;<a name="line.188"></a>
-<span class="sourceLineNo">189</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.189"></a>
-<span class="sourceLineNo">190</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.190"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.conf.Configuration;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.fs.FileStatus;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.fs.FileSystem;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.fs.LocatedFileStatus;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.fs.Path;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.Cell;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.CellBuilderType;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.CellComparatorImpl;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.CompoundConfiguration;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.HConstants;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.HConstants.OperationStatusCode;<a name="line.90"></a>
+<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.NamespaceDescriptor;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.97"></a>
+<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.TableName;<a name="line.98"></a>
+<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.Tag;<a name="line.99"></a>
+<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.TagUtil;<a name="line.100"></a>
+<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.101"></a>
+<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.client.Append;<a name="line.102"></a>
+<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;<a name="line.103"></a>
+<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.client.CompactionState;<a name="line.104"></a>
+<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.105"></a>
+<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.106"></a>
+<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.client.Get;<a name="line.107"></a>
+<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.108"></a>
+<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.client.IsolationLevel;<a name="line.109"></a>
+<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.110"></a>
+<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;<a name="line.111"></a>
+<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.client.Put;<a name="line.112"></a>
+<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.113"></a>
+<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.client.RegionInfoBuilder;<a name="line.114"></a>
+<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.115"></a>
+<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.client.Result;<a name="line.116"></a>
+<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.117"></a>
+<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.118"></a>
+<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.119"></a>
+<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.client.TableDescriptorBuilder;<a name="line.120"></a>
+<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.conf.ConfigurationManager;<a name="line.121"></a>
+<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;<a name="line.122"></a>
+<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;<a name="line.124"></a>
+<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.exceptions.TimeoutIOException;<a name="line.126"></a>
+<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.127"></a>
+<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.128"></a>
+<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.filter.FilterWrapper;<a name="line.129"></a>
+<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.filter.IncompatibleFilterException;<a name="line.130"></a>
+<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.131"></a>
+<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.132"></a>
+<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.133"></a>
+<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.io.hfile.HFile;<a name="line.134"></a>
+<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;<a name="line.135"></a>
+<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;<a name="line.136"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.ipc.RpcCall;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.apache.hadoop.hbase.regionserver.throttle.StoreHotnessProtector;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>import org.apache.hadoop.hbase.regionserver.wal.WALUtil;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hadoop.hbase.security.User;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.trace.TraceUtil;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.156"></a>
+<span class="sourceLineNo">157</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.157"></a>
+<span class="sourceLineNo">158</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>import org.apache.hadoop.hbase.util.CompressionTest;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>import org.apache.hadoop.hbase.util.EncryptionTest;<a name="line.160"></a>
+<span class="sourceLineNo">161</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.161"></a>
+<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.162"></a>
+<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.util.HashedBytes;<a name="line.163"></a>
+<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.util.NonceKey;<a name="line.164"></a>
+<span class="sourceLineNo">165</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.165"></a>
+<span class="sourceLineNo">166</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.166"></a>
+<span class="sourceLineNo">167</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.167"></a>
+<span class="sourceLineNo">168</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.168"></a>
+<span class="sourceLineNo">169</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.169"></a>
+<span class="sourceLineNo">170</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.170"></a>
+<span class="sourceLineNo">171</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.171"></a>
+<span class="sourceLineNo">172</span>import org.apache.hadoop.hbase.wal.WALKeyImpl;<a name="line.172"></a>
+<span class="sourceLineNo">173</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.173"></a>
+<span class="sourceLineNo">174</span>import org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;<a name="line.174"></a>
+<span class="sourceLineNo">175</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.175"></a>
+<span class="sourceLineNo">176</span>import org.apache.hadoop.util.StringUtils;<a name="line.176"></a>
+<span class="sourceLineNo">177</span>import org.apache.htrace.core.TraceScope;<a name="line.177"></a>
+<span class="sourceLineNo">178</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.178"></a>
+<span class="sourceLineNo">179</span>import org.slf4j.Logger;<a name="line.179"></a>
+<span class="sourceLineNo">180</span>import org.slf4j.LoggerFactory;<a name="line.180"></a>
+<span class="sourceLineNo">181</span><a name="line.181"></a>
+<span class="sourceLineNo">182</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.182"></a>
+<span class="sourceLineNo">183</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.183"></a>
+<span class="sourceLineNo">184</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.184"></a>
+<span class="sourceLineNo">185</span>import org.apache.hbase.thirdparty.com.google.common.collect.Maps;<a name="line.185"></a>
+<span class="sourceLineNo">186</span>import org.apache.hbase.thirdparty.com.google.common.io.Closeables;<a name="line.186"></a>
+<span class="sourceLineNo">187</span>import org.apache.hbase.thirdparty.com.google.protobuf.Service;<a name="line.187"></a>
+<span class="sourceLineNo">188</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.188"></a>
+<span class="sourceLineNo">189</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.189"></a>
+<span class="sourceLineNo">190</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.190"></a>
 <span class="sourceLineNo">191</span><a name="line.191"></a>
 <span class="sourceLineNo">192</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.192"></a>
 <span class="sourceLineNo">193</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;<a name="line.193"></a>
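
The import hunk above drops the unshaded org.apache.commons.collections.CollectionUtils in favor of the copy relocated into hbase-thirdparty (org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils), and regroups the shaded third-party imports after the Hadoop/HBase ones. The class name is unchanged, so call sites only need the new import. A minimal sketch of the pattern, assuming a hypothetical caller class and sample list, and the hbase-thirdparty jar on the classpath:

    import java.util.Collections;
    import java.util.List;

    import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;

    public class ShadedCollectionUtilsExample {
      public static void main(String[] args) {
        // Same API as commons-collections4; only the package prefix is shaded.
        List<String> rows = Collections.emptyList();
        System.out.println(CollectionUtils.isEmpty(rows)); // true: null-safe emptiness check
      }
    }
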


[03/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html
index af4f61a..1d9d491 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html
@@ -33,160 +33,161 @@
 <span class="sourceLineNo">025</span>import java.util.List;<a name="line.25"></a>
 <span class="sourceLineNo">026</span>import java.util.concurrent.TimeUnit;<a name="line.26"></a>
 <span class="sourceLineNo">027</span>import java.util.stream.Collectors;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import org.apache.commons.logging.Log;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.commons.logging.LogFactory;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.HBaseClassTestRule;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.HBaseTestingUtility;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.TableName;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.regionserver.HRegionServer;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.testclassification.ClientTests;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.testclassification.MediumTests;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.util.JVMClusterUtil;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.io.IOUtils;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.junit.After;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.junit.AfterClass;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.junit.Before;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.junit.BeforeClass;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.junit.ClassRule;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.junit.Rule;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.junit.Test;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.junit.experimental.categories.Category;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.junit.rules.TestName;<a name="line.48"></a>
-<span class="sourceLineNo">049</span><a name="line.49"></a>
-<span class="sourceLineNo">050</span>@Category({MediumTests.class, ClientTests.class})<a name="line.50"></a>
-<span class="sourceLineNo">051</span>public class TestFlushFromClient {<a name="line.51"></a>
-<span class="sourceLineNo">052</span><a name="line.52"></a>
-<span class="sourceLineNo">053</span>  @ClassRule<a name="line.53"></a>
-<span class="sourceLineNo">054</span>  public static final HBaseClassTestRule CLASS_RULE =<a name="line.54"></a>
-<span class="sourceLineNo">055</span>      HBaseClassTestRule.forClass(TestFlushFromClient.class);<a name="line.55"></a>
-<span class="sourceLineNo">056</span><a name="line.56"></a>
-<span class="sourceLineNo">057</span>  private static final Log LOG = LogFactory.getLog(TestFlushFromClient.class);<a name="line.57"></a>
-<span class="sourceLineNo">058</span>  private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();<a name="line.58"></a>
-<span class="sourceLineNo">059</span>  private static AsyncConnection asyncConn;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>  private static final byte[][] SPLITS = new byte[][]{Bytes.toBytes("3"), Bytes.toBytes("7")};<a name="line.60"></a>
-<span class="sourceLineNo">061</span>  private static final List&lt;byte[]&gt; ROWS = Arrays.asList(<a name="line.61"></a>
-<span class="sourceLineNo">062</span>    Bytes.toBytes("1"),<a name="line.62"></a>
-<span class="sourceLineNo">063</span>    Bytes.toBytes("4"),<a name="line.63"></a>
-<span class="sourceLineNo">064</span>    Bytes.toBytes("8"));<a name="line.64"></a>
-<span class="sourceLineNo">065</span>  private static final byte[] FAMILY = Bytes.toBytes("f1");<a name="line.65"></a>
-<span class="sourceLineNo">066</span><a name="line.66"></a>
-<span class="sourceLineNo">067</span>  @Rule<a name="line.67"></a>
-<span class="sourceLineNo">068</span>  public TestName name = new TestName();<a name="line.68"></a>
-<span class="sourceLineNo">069</span><a name="line.69"></a>
-<span class="sourceLineNo">070</span>  public TableName tableName;<a name="line.70"></a>
-<span class="sourceLineNo">071</span><a name="line.71"></a>
-<span class="sourceLineNo">072</span>  @BeforeClass<a name="line.72"></a>
-<span class="sourceLineNo">073</span>  public static void setUpBeforeClass() throws Exception {<a name="line.73"></a>
-<span class="sourceLineNo">074</span>    TEST_UTIL.startMiniCluster(ROWS.size());<a name="line.74"></a>
-<span class="sourceLineNo">075</span>    asyncConn = ConnectionFactory.createAsyncConnection(TEST_UTIL.getConfiguration()).get();<a name="line.75"></a>
-<span class="sourceLineNo">076</span>  }<a name="line.76"></a>
-<span class="sourceLineNo">077</span><a name="line.77"></a>
-<span class="sourceLineNo">078</span>  @AfterClass<a name="line.78"></a>
-<span class="sourceLineNo">079</span>  public static void tearDownAfterClass() throws Exception {<a name="line.79"></a>
-<span class="sourceLineNo">080</span>    IOUtils.cleanup(null, asyncConn);<a name="line.80"></a>
-<span class="sourceLineNo">081</span>    TEST_UTIL.shutdownMiniCluster();<a name="line.81"></a>
-<span class="sourceLineNo">082</span>  }<a name="line.82"></a>
-<span class="sourceLineNo">083</span><a name="line.83"></a>
-<span class="sourceLineNo">084</span>  @Before<a name="line.84"></a>
-<span class="sourceLineNo">085</span>  public void setUp() throws Exception {<a name="line.85"></a>
-<span class="sourceLineNo">086</span>    tableName = TableName.valueOf(name.getMethodName());<a name="line.86"></a>
-<span class="sourceLineNo">087</span>    try (Table t = TEST_UTIL.createTable(tableName, FAMILY, SPLITS)) {<a name="line.87"></a>
-<span class="sourceLineNo">088</span>      List&lt;Put&gt; puts = ROWS.stream().map(r -&gt; new Put(r)).collect(Collectors.toList());<a name="line.88"></a>
-<span class="sourceLineNo">089</span>      for (int i = 0; i != 20; ++i) {<a name="line.89"></a>
-<span class="sourceLineNo">090</span>        byte[] value = Bytes.toBytes(i);<a name="line.90"></a>
-<span class="sourceLineNo">091</span>        puts.forEach(p -&gt; p.addColumn(FAMILY, value, value));<a name="line.91"></a>
-<span class="sourceLineNo">092</span>      }<a name="line.92"></a>
-<span class="sourceLineNo">093</span>      t.put(puts);<a name="line.93"></a>
-<span class="sourceLineNo">094</span>    }<a name="line.94"></a>
-<span class="sourceLineNo">095</span>    assertFalse(getRegionInfo().isEmpty());<a name="line.95"></a>
-<span class="sourceLineNo">096</span>    assertTrue(getRegionInfo().stream().allMatch(r -&gt; r.getMemStoreDataSize() != 0));<a name="line.96"></a>
-<span class="sourceLineNo">097</span>  }<a name="line.97"></a>
-<span class="sourceLineNo">098</span><a name="line.98"></a>
-<span class="sourceLineNo">099</span>  @After<a name="line.99"></a>
-<span class="sourceLineNo">100</span>  public void tearDown() throws Exception {<a name="line.100"></a>
-<span class="sourceLineNo">101</span>    for (TableDescriptor htd : TEST_UTIL.getAdmin().listTableDescriptors()) {<a name="line.101"></a>
-<span class="sourceLineNo">102</span>      LOG.info("Tear down, remove table=" + htd.getTableName());<a name="line.102"></a>
-<span class="sourceLineNo">103</span>      TEST_UTIL.deleteTable(htd.getTableName());<a name="line.103"></a>
-<span class="sourceLineNo">104</span>    }<a name="line.104"></a>
-<span class="sourceLineNo">105</span>  }<a name="line.105"></a>
-<span class="sourceLineNo">106</span><a name="line.106"></a>
-<span class="sourceLineNo">107</span>  @Test<a name="line.107"></a>
-<span class="sourceLineNo">108</span>  public void testFlushTable() throws Exception {<a name="line.108"></a>
-<span class="sourceLineNo">109</span>    try (Admin admin = TEST_UTIL.getAdmin()) {<a name="line.109"></a>
-<span class="sourceLineNo">110</span>      admin.flush(tableName);<a name="line.110"></a>
-<span class="sourceLineNo">111</span>      assertFalse(getRegionInfo().stream().anyMatch(r -&gt; r.getMemStoreDataSize() != 0));<a name="line.111"></a>
-<span class="sourceLineNo">112</span>    }<a name="line.112"></a>
-<span class="sourceLineNo">113</span>  }<a name="line.113"></a>
-<span class="sourceLineNo">114</span><a name="line.114"></a>
-<span class="sourceLineNo">115</span>  @Test<a name="line.115"></a>
-<span class="sourceLineNo">116</span>  public void testAsyncFlushTable() throws Exception {<a name="line.116"></a>
-<span class="sourceLineNo">117</span>    AsyncAdmin admin = asyncConn.getAdmin();<a name="line.117"></a>
-<span class="sourceLineNo">118</span>    admin.flush(tableName).get();<a name="line.118"></a>
-<span class="sourceLineNo">119</span>    assertFalse(getRegionInfo().stream().anyMatch(r -&gt; r.getMemStoreDataSize() != 0));<a name="line.119"></a>
-<span class="sourceLineNo">120</span>  }<a name="line.120"></a>
-<span class="sourceLineNo">121</span><a name="line.121"></a>
-<span class="sourceLineNo">122</span>  @Test<a name="line.122"></a>
-<span class="sourceLineNo">123</span>  public void testFlushRegion() throws Exception {<a name="line.123"></a>
-<span class="sourceLineNo">124</span>    try (Admin admin = TEST_UTIL.getAdmin()) {<a name="line.124"></a>
-<span class="sourceLineNo">125</span>      for (HRegion r : getRegionInfo()) {<a name="line.125"></a>
-<span class="sourceLineNo">126</span>        admin.flushRegion(r.getRegionInfo().getRegionName());<a name="line.126"></a>
-<span class="sourceLineNo">127</span>        TimeUnit.SECONDS.sleep(1);<a name="line.127"></a>
-<span class="sourceLineNo">128</span>        assertEquals(0, r.getMemStoreDataSize());<a name="line.128"></a>
-<span class="sourceLineNo">129</span>      }<a name="line.129"></a>
-<span class="sourceLineNo">130</span>    }<a name="line.130"></a>
-<span class="sourceLineNo">131</span>  }<a name="line.131"></a>
-<span class="sourceLineNo">132</span><a name="line.132"></a>
-<span class="sourceLineNo">133</span>  @Test<a name="line.133"></a>
-<span class="sourceLineNo">134</span>  public void testAsyncFlushRegion() throws Exception {<a name="line.134"></a>
-<span class="sourceLineNo">135</span>    AsyncAdmin admin = asyncConn.getAdmin();<a name="line.135"></a>
-<span class="sourceLineNo">136</span>    for (HRegion r : getRegionInfo()) {<a name="line.136"></a>
-<span class="sourceLineNo">137</span>      admin.flushRegion(r.getRegionInfo().getRegionName()).get();<a name="line.137"></a>
-<span class="sourceLineNo">138</span>      TimeUnit.SECONDS.sleep(1);<a name="line.138"></a>
-<span class="sourceLineNo">139</span>      assertEquals(0, r.getMemStoreDataSize());<a name="line.139"></a>
-<span class="sourceLineNo">140</span>    }<a name="line.140"></a>
-<span class="sourceLineNo">141</span>  }<a name="line.141"></a>
-<span class="sourceLineNo">142</span><a name="line.142"></a>
-<span class="sourceLineNo">143</span>  @Test<a name="line.143"></a>
-<span class="sourceLineNo">144</span>  public void testFlushRegionServer() throws Exception {<a name="line.144"></a>
-<span class="sourceLineNo">145</span>    try (Admin admin = TEST_UTIL.getAdmin()) {<a name="line.145"></a>
-<span class="sourceLineNo">146</span>      for (HRegionServer rs : TEST_UTIL.getHBaseCluster()<a name="line.146"></a>
-<span class="sourceLineNo">147</span>            .getLiveRegionServerThreads()<a name="line.147"></a>
-<span class="sourceLineNo">148</span>            .stream().map(JVMClusterUtil.RegionServerThread::getRegionServer)<a name="line.148"></a>
-<span class="sourceLineNo">149</span>            .collect(Collectors.toList())) {<a name="line.149"></a>
-<span class="sourceLineNo">150</span>        admin.flushRegionServer(rs.getServerName());<a name="line.150"></a>
-<span class="sourceLineNo">151</span>        assertFalse(getRegionInfo(rs).stream().anyMatch(r -&gt; r.getMemStoreDataSize() != 0));<a name="line.151"></a>
-<span class="sourceLineNo">152</span>      }<a name="line.152"></a>
-<span class="sourceLineNo">153</span>    }<a name="line.153"></a>
-<span class="sourceLineNo">154</span>  }<a name="line.154"></a>
-<span class="sourceLineNo">155</span><a name="line.155"></a>
-<span class="sourceLineNo">156</span>  @Test<a name="line.156"></a>
-<span class="sourceLineNo">157</span>  public void testAsyncFlushRegionServer() throws Exception {<a name="line.157"></a>
-<span class="sourceLineNo">158</span>    AsyncAdmin admin = asyncConn.getAdmin();<a name="line.158"></a>
-<span class="sourceLineNo">159</span>    for (HRegionServer rs : TEST_UTIL.getHBaseCluster()<a name="line.159"></a>
-<span class="sourceLineNo">160</span>      .getLiveRegionServerThreads()<a name="line.160"></a>
-<span class="sourceLineNo">161</span>      .stream().map(JVMClusterUtil.RegionServerThread::getRegionServer)<a name="line.161"></a>
-<span class="sourceLineNo">162</span>      .collect(Collectors.toList())) {<a name="line.162"></a>
-<span class="sourceLineNo">163</span>      admin.flushRegionServer(rs.getServerName()).get();<a name="line.163"></a>
-<span class="sourceLineNo">164</span>      assertFalse(getRegionInfo(rs).stream().anyMatch(r -&gt; r.getMemStoreDataSize() != 0));<a name="line.164"></a>
-<span class="sourceLineNo">165</span>    }<a name="line.165"></a>
-<span class="sourceLineNo">166</span>  }<a name="line.166"></a>
-<span class="sourceLineNo">167</span><a name="line.167"></a>
-<span class="sourceLineNo">168</span>  private List&lt;HRegion&gt; getRegionInfo() {<a name="line.168"></a>
-<span class="sourceLineNo">169</span>    return TEST_UTIL.getHBaseCluster().getLiveRegionServerThreads().stream()<a name="line.169"></a>
-<span class="sourceLineNo">170</span>      .map(JVMClusterUtil.RegionServerThread::getRegionServer)<a name="line.170"></a>
-<span class="sourceLineNo">171</span>      .flatMap(r -&gt; r.getRegions().stream())<a name="line.171"></a>
-<span class="sourceLineNo">172</span>      .filter(r -&gt; r.getTableDescriptor().getTableName().equals(tableName))<a name="line.172"></a>
-<span class="sourceLineNo">173</span>      .collect(Collectors.toList());<a name="line.173"></a>
-<span class="sourceLineNo">174</span>  }<a name="line.174"></a>
-<span class="sourceLineNo">175</span><a name="line.175"></a>
-<span class="sourceLineNo">176</span>  private List&lt;HRegion&gt; getRegionInfo(HRegionServer rs) {<a name="line.176"></a>
-<span class="sourceLineNo">177</span>    return rs.getRegions().stream()<a name="line.177"></a>
-<span class="sourceLineNo">178</span>      .filter(v -&gt; v.getTableDescriptor().getTableName().equals(tableName))<a name="line.178"></a>
-<span class="sourceLineNo">179</span>      .collect(Collectors.toList());<a name="line.179"></a>
-<span class="sourceLineNo">180</span>  }<a name="line.180"></a>
-<span class="sourceLineNo">181</span>}<a name="line.181"></a>
+<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.HBaseClassTestRule;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.HBaseTestingUtility;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.TableName;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.regionserver.HRegionServer;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.testclassification.ClientTests;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.testclassification.MediumTests;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.util.JVMClusterUtil;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.apache.hadoop.io.IOUtils;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.junit.After;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.junit.AfterClass;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.junit.Before;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.junit.BeforeClass;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.junit.ClassRule;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.junit.Rule;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.junit.Test;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.junit.experimental.categories.Category;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.junit.rules.TestName;<a name="line.46"></a>
+<span class="sourceLineNo">047</span><a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.slf4j.Logger;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.slf4j.LoggerFactory;<a name="line.49"></a>
+<span class="sourceLineNo">050</span><a name="line.50"></a>
+<span class="sourceLineNo">051</span>@Category({MediumTests.class, ClientTests.class})<a name="line.51"></a>
+<span class="sourceLineNo">052</span>public class TestFlushFromClient {<a name="line.52"></a>
+<span class="sourceLineNo">053</span><a name="line.53"></a>
+<span class="sourceLineNo">054</span>  @ClassRule<a name="line.54"></a>
+<span class="sourceLineNo">055</span>  public static final HBaseClassTestRule CLASS_RULE =<a name="line.55"></a>
+<span class="sourceLineNo">056</span>      HBaseClassTestRule.forClass(TestFlushFromClient.class);<a name="line.56"></a>
+<span class="sourceLineNo">057</span><a name="line.57"></a>
+<span class="sourceLineNo">058</span>  private static final Logger LOG = LoggerFactory.getLogger(TestFlushFromClient.class);<a name="line.58"></a>
+<span class="sourceLineNo">059</span>  private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();<a name="line.59"></a>
+<span class="sourceLineNo">060</span>  private static AsyncConnection asyncConn;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>  private static final byte[][] SPLITS = new byte[][]{Bytes.toBytes("3"), Bytes.toBytes("7")};<a name="line.61"></a>
+<span class="sourceLineNo">062</span>  private static final List&lt;byte[]&gt; ROWS = Arrays.asList(<a name="line.62"></a>
+<span class="sourceLineNo">063</span>    Bytes.toBytes("1"),<a name="line.63"></a>
+<span class="sourceLineNo">064</span>    Bytes.toBytes("4"),<a name="line.64"></a>
+<span class="sourceLineNo">065</span>    Bytes.toBytes("8"));<a name="line.65"></a>
+<span class="sourceLineNo">066</span>  private static final byte[] FAMILY = Bytes.toBytes("f1");<a name="line.66"></a>
+<span class="sourceLineNo">067</span><a name="line.67"></a>
+<span class="sourceLineNo">068</span>  @Rule<a name="line.68"></a>
+<span class="sourceLineNo">069</span>  public TestName name = new TestName();<a name="line.69"></a>
+<span class="sourceLineNo">070</span><a name="line.70"></a>
+<span class="sourceLineNo">071</span>  public TableName tableName;<a name="line.71"></a>
+<span class="sourceLineNo">072</span><a name="line.72"></a>
+<span class="sourceLineNo">073</span>  @BeforeClass<a name="line.73"></a>
+<span class="sourceLineNo">074</span>  public static void setUpBeforeClass() throws Exception {<a name="line.74"></a>
+<span class="sourceLineNo">075</span>    TEST_UTIL.startMiniCluster(ROWS.size());<a name="line.75"></a>
+<span class="sourceLineNo">076</span>    asyncConn = ConnectionFactory.createAsyncConnection(TEST_UTIL.getConfiguration()).get();<a name="line.76"></a>
+<span class="sourceLineNo">077</span>  }<a name="line.77"></a>
+<span class="sourceLineNo">078</span><a name="line.78"></a>
+<span class="sourceLineNo">079</span>  @AfterClass<a name="line.79"></a>
+<span class="sourceLineNo">080</span>  public static void tearDownAfterClass() throws Exception {<a name="line.80"></a>
+<span class="sourceLineNo">081</span>    IOUtils.cleanup(null, asyncConn);<a name="line.81"></a>
+<span class="sourceLineNo">082</span>    TEST_UTIL.shutdownMiniCluster();<a name="line.82"></a>
+<span class="sourceLineNo">083</span>  }<a name="line.83"></a>
+<span class="sourceLineNo">084</span><a name="line.84"></a>
+<span class="sourceLineNo">085</span>  @Before<a name="line.85"></a>
+<span class="sourceLineNo">086</span>  public void setUp() throws Exception {<a name="line.86"></a>
+<span class="sourceLineNo">087</span>    tableName = TableName.valueOf(name.getMethodName());<a name="line.87"></a>
+<span class="sourceLineNo">088</span>    try (Table t = TEST_UTIL.createTable(tableName, FAMILY, SPLITS)) {<a name="line.88"></a>
+<span class="sourceLineNo">089</span>      List&lt;Put&gt; puts = ROWS.stream().map(r -&gt; new Put(r)).collect(Collectors.toList());<a name="line.89"></a>
+<span class="sourceLineNo">090</span>      for (int i = 0; i != 20; ++i) {<a name="line.90"></a>
+<span class="sourceLineNo">091</span>        byte[] value = Bytes.toBytes(i);<a name="line.91"></a>
+<span class="sourceLineNo">092</span>        puts.forEach(p -&gt; p.addColumn(FAMILY, value, value));<a name="line.92"></a>
+<span class="sourceLineNo">093</span>      }<a name="line.93"></a>
+<span class="sourceLineNo">094</span>      t.put(puts);<a name="line.94"></a>
+<span class="sourceLineNo">095</span>    }<a name="line.95"></a>
+<span class="sourceLineNo">096</span>    assertFalse(getRegionInfo().isEmpty());<a name="line.96"></a>
+<span class="sourceLineNo">097</span>    assertTrue(getRegionInfo().stream().allMatch(r -&gt; r.getMemStoreDataSize() != 0));<a name="line.97"></a>
+<span class="sourceLineNo">098</span>  }<a name="line.98"></a>
+<span class="sourceLineNo">099</span><a name="line.99"></a>
+<span class="sourceLineNo">100</span>  @After<a name="line.100"></a>
+<span class="sourceLineNo">101</span>  public void tearDown() throws Exception {<a name="line.101"></a>
+<span class="sourceLineNo">102</span>    for (TableDescriptor htd : TEST_UTIL.getAdmin().listTableDescriptors()) {<a name="line.102"></a>
+<span class="sourceLineNo">103</span>      LOG.info("Tear down, remove table=" + htd.getTableName());<a name="line.103"></a>
+<span class="sourceLineNo">104</span>      TEST_UTIL.deleteTable(htd.getTableName());<a name="line.104"></a>
+<span class="sourceLineNo">105</span>    }<a name="line.105"></a>
+<span class="sourceLineNo">106</span>  }<a name="line.106"></a>
+<span class="sourceLineNo">107</span><a name="line.107"></a>
+<span class="sourceLineNo">108</span>  @Test<a name="line.108"></a>
+<span class="sourceLineNo">109</span>  public void testFlushTable() throws Exception {<a name="line.109"></a>
+<span class="sourceLineNo">110</span>    try (Admin admin = TEST_UTIL.getAdmin()) {<a name="line.110"></a>
+<span class="sourceLineNo">111</span>      admin.flush(tableName);<a name="line.111"></a>
+<span class="sourceLineNo">112</span>      assertFalse(getRegionInfo().stream().anyMatch(r -&gt; r.getMemStoreDataSize() != 0));<a name="line.112"></a>
+<span class="sourceLineNo">113</span>    }<a name="line.113"></a>
+<span class="sourceLineNo">114</span>  }<a name="line.114"></a>
+<span class="sourceLineNo">115</span><a name="line.115"></a>
+<span class="sourceLineNo">116</span>  @Test<a name="line.116"></a>
+<span class="sourceLineNo">117</span>  public void testAsyncFlushTable() throws Exception {<a name="line.117"></a>
+<span class="sourceLineNo">118</span>    AsyncAdmin admin = asyncConn.getAdmin();<a name="line.118"></a>
+<span class="sourceLineNo">119</span>    admin.flush(tableName).get();<a name="line.119"></a>
+<span class="sourceLineNo">120</span>    assertFalse(getRegionInfo().stream().anyMatch(r -&gt; r.getMemStoreDataSize() != 0));<a name="line.120"></a>
+<span class="sourceLineNo">121</span>  }<a name="line.121"></a>
+<span class="sourceLineNo">122</span><a name="line.122"></a>
+<span class="sourceLineNo">123</span>  @Test<a name="line.123"></a>
+<span class="sourceLineNo">124</span>  public void testFlushRegion() throws Exception {<a name="line.124"></a>
+<span class="sourceLineNo">125</span>    try (Admin admin = TEST_UTIL.getAdmin()) {<a name="line.125"></a>
+<span class="sourceLineNo">126</span>      for (HRegion r : getRegionInfo()) {<a name="line.126"></a>
+<span class="sourceLineNo">127</span>        admin.flushRegion(r.getRegionInfo().getRegionName());<a name="line.127"></a>
+<span class="sourceLineNo">128</span>        TimeUnit.SECONDS.sleep(1);<a name="line.128"></a>
+<span class="sourceLineNo">129</span>        assertEquals(0, r.getMemStoreDataSize());<a name="line.129"></a>
+<span class="sourceLineNo">130</span>      }<a name="line.130"></a>
+<span class="sourceLineNo">131</span>    }<a name="line.131"></a>
+<span class="sourceLineNo">132</span>  }<a name="line.132"></a>
+<span class="sourceLineNo">133</span><a name="line.133"></a>
+<span class="sourceLineNo">134</span>  @Test<a name="line.134"></a>
+<span class="sourceLineNo">135</span>  public void testAsyncFlushRegion() throws Exception {<a name="line.135"></a>
+<span class="sourceLineNo">136</span>    AsyncAdmin admin = asyncConn.getAdmin();<a name="line.136"></a>
+<span class="sourceLineNo">137</span>    for (HRegion r : getRegionInfo()) {<a name="line.137"></a>
+<span class="sourceLineNo">138</span>      admin.flushRegion(r.getRegionInfo().getRegionName()).get();<a name="line.138"></a>
+<span class="sourceLineNo">139</span>      TimeUnit.SECONDS.sleep(1);<a name="line.139"></a>
+<span class="sourceLineNo">140</span>      assertEquals(0, r.getMemStoreDataSize());<a name="line.140"></a>
+<span class="sourceLineNo">141</span>    }<a name="line.141"></a>
+<span class="sourceLineNo">142</span>  }<a name="line.142"></a>
+<span class="sourceLineNo">143</span><a name="line.143"></a>
+<span class="sourceLineNo">144</span>  @Test<a name="line.144"></a>
+<span class="sourceLineNo">145</span>  public void testFlushRegionServer() throws Exception {<a name="line.145"></a>
+<span class="sourceLineNo">146</span>    try (Admin admin = TEST_UTIL.getAdmin()) {<a name="line.146"></a>
+<span class="sourceLineNo">147</span>      for (HRegionServer rs : TEST_UTIL.getHBaseCluster()<a name="line.147"></a>
+<span class="sourceLineNo">148</span>            .getLiveRegionServerThreads()<a name="line.148"></a>
+<span class="sourceLineNo">149</span>            .stream().map(JVMClusterUtil.RegionServerThread::getRegionServer)<a name="line.149"></a>
+<span class="sourceLineNo">150</span>            .collect(Collectors.toList())) {<a name="line.150"></a>
+<span class="sourceLineNo">151</span>        admin.flushRegionServer(rs.getServerName());<a name="line.151"></a>
+<span class="sourceLineNo">152</span>        assertFalse(getRegionInfo(rs).stream().anyMatch(r -&gt; r.getMemStoreDataSize() != 0));<a name="line.152"></a>
+<span class="sourceLineNo">153</span>      }<a name="line.153"></a>
+<span class="sourceLineNo">154</span>    }<a name="line.154"></a>
+<span class="sourceLineNo">155</span>  }<a name="line.155"></a>
+<span class="sourceLineNo">156</span><a name="line.156"></a>
+<span class="sourceLineNo">157</span>  @Test<a name="line.157"></a>
+<span class="sourceLineNo">158</span>  public void testAsyncFlushRegionServer() throws Exception {<a name="line.158"></a>
+<span class="sourceLineNo">159</span>    AsyncAdmin admin = asyncConn.getAdmin();<a name="line.159"></a>
+<span class="sourceLineNo">160</span>    for (HRegionServer rs : TEST_UTIL.getHBaseCluster()<a name="line.160"></a>
+<span class="sourceLineNo">161</span>      .getLiveRegionServerThreads()<a name="line.161"></a>
+<span class="sourceLineNo">162</span>      .stream().map(JVMClusterUtil.RegionServerThread::getRegionServer)<a name="line.162"></a>
+<span class="sourceLineNo">163</span>      .collect(Collectors.toList())) {<a name="line.163"></a>
+<span class="sourceLineNo">164</span>      admin.flushRegionServer(rs.getServerName()).get();<a name="line.164"></a>
+<span class="sourceLineNo">165</span>      assertFalse(getRegionInfo(rs).stream().anyMatch(r -&gt; r.getMemStoreDataSize() != 0));<a name="line.165"></a>
+<span class="sourceLineNo">166</span>    }<a name="line.166"></a>
+<span class="sourceLineNo">167</span>  }<a name="line.167"></a>
+<span class="sourceLineNo">168</span><a name="line.168"></a>
+<span class="sourceLineNo">169</span>  private List&lt;HRegion&gt; getRegionInfo() {<a name="line.169"></a>
+<span class="sourceLineNo">170</span>    return TEST_UTIL.getHBaseCluster().getLiveRegionServerThreads().stream()<a name="line.170"></a>
+<span class="sourceLineNo">171</span>      .map(JVMClusterUtil.RegionServerThread::getRegionServer)<a name="line.171"></a>
+<span class="sourceLineNo">172</span>      .flatMap(r -&gt; r.getRegions().stream())<a name="line.172"></a>
+<span class="sourceLineNo">173</span>      .filter(r -&gt; r.getTableDescriptor().getTableName().equals(tableName))<a name="line.173"></a>
+<span class="sourceLineNo">174</span>      .collect(Collectors.toList());<a name="line.174"></a>
+<span class="sourceLineNo">175</span>  }<a name="line.175"></a>
+<span class="sourceLineNo">176</span><a name="line.176"></a>
+<span class="sourceLineNo">177</span>  private List&lt;HRegion&gt; getRegionInfo(HRegionServer rs) {<a name="line.177"></a>
+<span class="sourceLineNo">178</span>    return rs.getRegions().stream()<a name="line.178"></a>
+<span class="sourceLineNo">179</span>      .filter(v -&gt; v.getTableDescriptor().getTableName().equals(tableName))<a name="line.179"></a>
+<span class="sourceLineNo">180</span>      .collect(Collectors.toList());<a name="line.180"></a>
+<span class="sourceLineNo">181</span>  }<a name="line.181"></a>
+<span class="sourceLineNo">182</span>}<a name="line.182"></a>
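
The hunk above migrates TestFlushFromClient from commons-logging (Log/LogFactory) to slf4j (Logger/LoggerFactory); the LOG field keeps its name, so only the declaration and, optionally, the message style change. A minimal before/after sketch of that migration, assuming a hypothetical class name:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class LoggingMigrationExample {
      // Before (commons-logging):
      //   private static final Log LOG = LogFactory.getLog(LoggingMigrationExample.class);
      // After (slf4j):
      private static final Logger LOG = LoggerFactory.getLogger(LoggingMigrationExample.class);

      void tearDownExample(String tableName) {
        // slf4j parameterized messages avoid eager string concatenation.
        LOG.info("Tear down, remove table={}", tableName);
      }
    }
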
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html
index bcaa960..2993620 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.html
@@ -28,257 +28,258 @@
 <span class="sourceLineNo">020</span>import java.io.File;<a name="line.20"></a>
 <span class="sourceLineNo">021</span><a name="line.21"></a>
 <span class="sourceLineNo">022</span>import org.apache.commons.io.FileUtils;<a name="line.22"></a>
-<span class="sourceLineNo">023</span>import org.apache.commons.logging.Log;<a name="line.23"></a>
-<span class="sourceLineNo">024</span>import org.apache.commons.logging.LogFactory;<a name="line.24"></a>
-<span class="sourceLineNo">025</span>import org.apache.hadoop.hbase.HBaseClassTestRule;<a name="line.25"></a>
-<span class="sourceLineNo">026</span>import org.apache.hadoop.hbase.HBaseTestingUtility;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import org.apache.hadoop.hbase.HConstants;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.MiniHBaseCluster;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.ServerName;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.TableName;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.master.HMaster;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.master.NoSuchProcedureException;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.regionserver.HRegionServer;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.testclassification.MediumTests;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.junit.AfterClass;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.junit.Assert;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.junit.BeforeClass;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.junit.ClassRule;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.junit.Rule;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.junit.Test;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.junit.experimental.categories.Category;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.junit.rules.TestName;<a name="line.45"></a>
-<span class="sourceLineNo">046</span><a name="line.46"></a>
-<span class="sourceLineNo">047</span>@Category(MediumTests.class)<a name="line.47"></a>
-<span class="sourceLineNo">048</span>public class TestSeparateClientZKCluster {<a name="line.48"></a>
-<span class="sourceLineNo">049</span>  private static final Log LOG = LogFactory.getLog(TestSeparateClientZKCluster.class);<a name="line.49"></a>
-<span class="sourceLineNo">050</span>  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();<a name="line.50"></a>
-<span class="sourceLineNo">051</span>  private static final File clientZkDir = new File("/tmp/TestSeparateClientZKCluster");<a name="line.51"></a>
-<span class="sourceLineNo">052</span>  private static final int ZK_SESSION_TIMEOUT = 5000;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>  private static MiniZooKeeperCluster clientZkCluster;<a name="line.53"></a>
-<span class="sourceLineNo">054</span><a name="line.54"></a>
-<span class="sourceLineNo">055</span>  private final byte[] family = Bytes.toBytes("cf");<a name="line.55"></a>
-<span class="sourceLineNo">056</span>  private final byte[] qualifier = Bytes.toBytes("c1");<a name="line.56"></a>
-<span class="sourceLineNo">057</span>  private final byte[] row = Bytes.toBytes("row");<a name="line.57"></a>
-<span class="sourceLineNo">058</span>  private final byte[] value = Bytes.toBytes("v1");<a name="line.58"></a>
-<span class="sourceLineNo">059</span>  private final byte[] newVal = Bytes.toBytes("v2");<a name="line.59"></a>
-<span class="sourceLineNo">060</span><a name="line.60"></a>
-<span class="sourceLineNo">061</span>  @Rule<a name="line.61"></a>
-<span class="sourceLineNo">062</span>  public TestName name = new TestName();<a name="line.62"></a>
-<span class="sourceLineNo">063</span><a name="line.63"></a>
-<span class="sourceLineNo">064</span>  @ClassRule<a name="line.64"></a>
-<span class="sourceLineNo">065</span>  public static final HBaseClassTestRule CLASS_RULE =<a name="line.65"></a>
-<span class="sourceLineNo">066</span>      HBaseClassTestRule.forClass(TestSeparateClientZKCluster.class);<a name="line.66"></a>
-<span class="sourceLineNo">067</span><a name="line.67"></a>
-<span class="sourceLineNo">068</span>  @BeforeClass<a name="line.68"></a>
-<span class="sourceLineNo">069</span>  public static void beforeAllTests() throws Exception {<a name="line.69"></a>
-<span class="sourceLineNo">070</span>    int clientZkPort = 21828;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>    clientZkCluster = new MiniZooKeeperCluster(TEST_UTIL.getConfiguration());<a name="line.71"></a>
-<span class="sourceLineNo">072</span>    clientZkCluster.setDefaultClientPort(clientZkPort);<a name="line.72"></a>
-<span class="sourceLineNo">073</span>    clientZkCluster.startup(clientZkDir);<a name="line.73"></a>
-<span class="sourceLineNo">074</span>    // reduce the retry number and start log counter<a name="line.74"></a>
-<span class="sourceLineNo">075</span>    TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 2);<a name="line.75"></a>
-<span class="sourceLineNo">076</span>    TEST_UTIL.getConfiguration().setInt("hbase.client.start.log.errors.counter", -1);<a name="line.76"></a>
-<span class="sourceLineNo">077</span>    TEST_UTIL.getConfiguration().setInt("zookeeper.recovery.retry", 1);<a name="line.77"></a>
-<span class="sourceLineNo">078</span>    // core settings for testing client ZK cluster<a name="line.78"></a>
-<span class="sourceLineNo">079</span>    TEST_UTIL.getConfiguration().set(HConstants.CLIENT_ZOOKEEPER_QUORUM, HConstants.LOCALHOST);<a name="line.79"></a>
-<span class="sourceLineNo">080</span>    TEST_UTIL.getConfiguration().setInt(HConstants.CLIENT_ZOOKEEPER_CLIENT_PORT, clientZkPort);<a name="line.80"></a>
-<span class="sourceLineNo">081</span>    // reduce zk session timeout to easier trigger session expiration<a name="line.81"></a>
-<span class="sourceLineNo">082</span>    TEST_UTIL.getConfiguration().setInt(HConstants.ZK_SESSION_TIMEOUT, ZK_SESSION_TIMEOUT);<a name="line.82"></a>
-<span class="sourceLineNo">083</span>    // Start a cluster with 2 masters and 3 regionservers.<a name="line.83"></a>
-<span class="sourceLineNo">084</span>    TEST_UTIL.startMiniCluster(2, 3);<a name="line.84"></a>
-<span class="sourceLineNo">085</span>  }<a name="line.85"></a>
-<span class="sourceLineNo">086</span><a name="line.86"></a>
-<span class="sourceLineNo">087</span>  @AfterClass<a name="line.87"></a>
-<span class="sourceLineNo">088</span>  public static void afterAllTests() throws Exception {<a name="line.88"></a>
-<span class="sourceLineNo">089</span>    TEST_UTIL.shutdownMiniCluster();<a name="line.89"></a>
-<span class="sourceLineNo">090</span>    clientZkCluster.shutdown();<a name="line.90"></a>
-<span class="sourceLineNo">091</span>    FileUtils.deleteDirectory(clientZkDir);<a name="line.91"></a>
-<span class="sourceLineNo">092</span>  }<a name="line.92"></a>
-<span class="sourceLineNo">093</span><a name="line.93"></a>
-<span class="sourceLineNo">094</span>  @Test(timeout = 60000)<a name="line.94"></a>
-<span class="sourceLineNo">095</span>  public void testBasicOperation() throws Exception {<a name="line.95"></a>
-<span class="sourceLineNo">096</span>    TableName tn = TableName.valueOf(name.getMethodName());<a name="line.96"></a>
-<span class="sourceLineNo">097</span>    // create table<a name="line.97"></a>
-<span class="sourceLineNo">098</span>    Connection conn = TEST_UTIL.getConnection();<a name="line.98"></a>
-<span class="sourceLineNo">099</span>    Admin admin = conn.getAdmin();<a name="line.99"></a>
-<span class="sourceLineNo">100</span>    HTable table = (HTable) conn.getTable(tn);<a name="line.100"></a>
-<span class="sourceLineNo">101</span>    try {<a name="line.101"></a>
-<span class="sourceLineNo">102</span>      ColumnFamilyDescriptorBuilder cfDescBuilder =<a name="line.102"></a>
-<span class="sourceLineNo">103</span>          ColumnFamilyDescriptorBuilder.newBuilder(family);<a name="line.103"></a>
-<span class="sourceLineNo">104</span>      TableDescriptorBuilder tableDescBuilder =<a name="line.104"></a>
-<span class="sourceLineNo">105</span>          TableDescriptorBuilder.newBuilder(tn).setColumnFamily(cfDescBuilder.build());<a name="line.105"></a>
-<span class="sourceLineNo">106</span>      admin.createTable(tableDescBuilder.build());<a name="line.106"></a>
-<span class="sourceLineNo">107</span>      // test simple get and put<a name="line.107"></a>
-<span class="sourceLineNo">108</span>      Put put = new Put(row);<a name="line.108"></a>
-<span class="sourceLineNo">109</span>      put.addColumn(family, qualifier, value);<a name="line.109"></a>
-<span class="sourceLineNo">110</span>      table.put(put);<a name="line.110"></a>
-<span class="sourceLineNo">111</span>      Get get = new Get(row);<a name="line.111"></a>
-<span class="sourceLineNo">112</span>      Result result = table.get(get);<a name="line.112"></a>
-<span class="sourceLineNo">113</span>      LOG.debug("Result: " + Bytes.toString(result.getValue(family, qualifier)));<a name="line.113"></a>
-<span class="sourceLineNo">114</span>      Assert.assertArrayEquals(value, result.getValue(family, qualifier));<a name="line.114"></a>
-<span class="sourceLineNo">115</span>    } finally {<a name="line.115"></a>
-<span class="sourceLineNo">116</span>      admin.close();<a name="line.116"></a>
-<span class="sourceLineNo">117</span>      table.close();<a name="line.117"></a>
-<span class="sourceLineNo">118</span>    }<a name="line.118"></a>
-<span class="sourceLineNo">119</span>  }<a name="line.119"></a>
-<span class="sourceLineNo">120</span><a name="line.120"></a>
-<span class="sourceLineNo">121</span>  @Test(timeout = 60000)<a name="line.121"></a>
-<span class="sourceLineNo">122</span>  public void testMasterSwitch() throws Exception {<a name="line.122"></a>
-<span class="sourceLineNo">123</span>    // get an admin instance and issue some request first<a name="line.123"></a>
-<span class="sourceLineNo">124</span>    Connection conn = TEST_UTIL.getConnection();<a name="line.124"></a>
-<span class="sourceLineNo">125</span>    Admin admin = conn.getAdmin();<a name="line.125"></a>
-<span class="sourceLineNo">126</span>    LOG.debug("Tables: " + admin.listTableDescriptors());<a name="line.126"></a>
-<span class="sourceLineNo">127</span>    try {<a name="line.127"></a>
-<span class="sourceLineNo">128</span>      MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster();<a name="line.128"></a>
-<span class="sourceLineNo">129</span>      // switch active master<a name="line.129"></a>
-<span class="sourceLineNo">130</span>      HMaster master = cluster.getMaster();<a name="line.130"></a>
-<span class="sourceLineNo">131</span>      master.stopMaster();<a name="line.131"></a>
-<span class="sourceLineNo">132</span>      while (!master.isShutDown()) {<a name="line.132"></a>
-<span class="sourceLineNo">133</span>        Thread.sleep(200);<a name="line.133"></a>
-<span class="sourceLineNo">134</span>      }<a name="line.134"></a>
-<span class="sourceLineNo">135</span>      while (cluster.getMaster() == null || !cluster.getMaster().isInitialized()) {<a name="line.135"></a>
-<span class="sourceLineNo">136</span>        Thread.sleep(200);<a name="line.136"></a>
-<span class="sourceLineNo">137</span>      }<a name="line.137"></a>
-<span class="sourceLineNo">138</span>      // confirm client access still works<a name="line.138"></a>
-<span class="sourceLineNo">139</span>      Assert.assertTrue(admin.balance(false));<a name="line.139"></a>
-<span class="sourceLineNo">140</span>    } finally {<a name="line.140"></a>
-<span class="sourceLineNo">141</span>      admin.close();<a name="line.141"></a>
-<span class="sourceLineNo">142</span>    }<a name="line.142"></a>
-<span class="sourceLineNo">143</span>  }<a name="line.143"></a>
-<span class="sourceLineNo">144</span><a name="line.144"></a>
-<span class="sourceLineNo">145</span>  @Test(timeout = 60000)<a name="line.145"></a>
-<span class="sourceLineNo">146</span>  public void testMetaRegionMove() throws Exception {<a name="line.146"></a>
-<span class="sourceLineNo">147</span>    TableName tn = TableName.valueOf(name.getMethodName());<a name="line.147"></a>
-<span class="sourceLineNo">148</span>    // create table<a name="line.148"></a>
-<span class="sourceLineNo">149</span>    Connection conn = TEST_UTIL.getConnection();<a name="line.149"></a>
-<span class="sourceLineNo">150</span>    Admin admin = conn.getAdmin();<a name="line.150"></a>
-<span class="sourceLineNo">151</span>    HTable table = (HTable) conn.getTable(tn);<a name="line.151"></a>
-<span class="sourceLineNo">152</span>    try {<a name="line.152"></a>
-<span class="sourceLineNo">153</span>      MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster();<a name="line.153"></a>
-<span class="sourceLineNo">154</span>      ColumnFamilyDescriptorBuilder cfDescBuilder =<a name="line.154"></a>
-<span class="sourceLineNo">155</span>          ColumnFamilyDescriptorBuilder.newBuilder(family);<a name="line.155"></a>
-<span class="sourceLineNo">156</span>      TableDescriptorBuilder tableDescBuilder =<a name="line.156"></a>
-<span class="sourceLineNo">157</span>          TableDescriptorBuilder.newBuilder(tn).setColumnFamily(cfDescBuilder.build());<a name="line.157"></a>
-<span class="sourceLineNo">158</span>      admin.createTable(tableDescBuilder.build());<a name="line.158"></a>
-<span class="sourceLineNo">159</span>      // issue some requests to cache the region location<a name="line.159"></a>
-<span class="sourceLineNo">160</span>      Put put = new Put(row);<a name="line.160"></a>
-<span class="sourceLineNo">161</span>      put.addColumn(family, qualifier, value);<a name="line.161"></a>
-<span class="sourceLineNo">162</span>      table.put(put);<a name="line.162"></a>
-<span class="sourceLineNo">163</span>      Get get = new Get(row);<a name="line.163"></a>
-<span class="sourceLineNo">164</span>      Result result = table.get(get);<a name="line.164"></a>
-<span class="sourceLineNo">165</span>      // move meta region and confirm client could detect<a name="line.165"></a>
-<span class="sourceLineNo">166</span>      byte[] destServerName = null;<a name="line.166"></a>
-<span class="sourceLineNo">167</span>      for (RegionServerThread rst : cluster.getLiveRegionServerThreads()) {<a name="line.167"></a>
-<span class="sourceLineNo">168</span>        ServerName name = rst.getRegionServer().getServerName();<a name="line.168"></a>
-<span class="sourceLineNo">169</span>        if (!name.equals(cluster.getServerHoldingMeta())) {<a name="line.169"></a>
-<span class="sourceLineNo">170</span>          destServerName = Bytes.toBytes(name.getServerName());<a name="line.170"></a>
-<span class="sourceLineNo">171</span>          break;<a name="line.171"></a>
-<span class="sourceLineNo">172</span>        }<a name="line.172"></a>
-<span class="sourceLineNo">173</span>      }<a name="line.173"></a>
-<span class="sourceLineNo">174</span>      admin.move(RegionInfoBuilder.FIRST_META_REGIONINFO.getEncodedNameAsBytes(), destServerName);<a name="line.174"></a>
-<span class="sourceLineNo">175</span>      LOG.debug("Finished moving meta");<a name="line.175"></a>
-<span class="sourceLineNo">176</span>      // invalidate client cache<a name="line.176"></a>
-<span class="sourceLineNo">177</span>      RegionInfo region =<a name="line.177"></a>
-<span class="sourceLineNo">178</span>          table.getRegionLocator().getRegionLocation(row).getRegion();<a name="line.178"></a>
-<span class="sourceLineNo">179</span>      ServerName currentServer = cluster.getServerHoldingRegion(tn, region.getRegionName());<a name="line.179"></a>
-<span class="sourceLineNo">180</span>      for (RegionServerThread rst : cluster.getLiveRegionServerThreads()) {<a name="line.180"></a>
-<span class="sourceLineNo">181</span>        ServerName name = rst.getRegionServer().getServerName();<a name="line.181"></a>
-<span class="sourceLineNo">182</span>        if (!name.equals(currentServer)) {<a name="line.182"></a>
-<span class="sourceLineNo">183</span>          destServerName = Bytes.toBytes(name.getServerName());<a name="line.183"></a>
-<span class="sourceLineNo">184</span>          break;<a name="line.184"></a>
-<span class="sourceLineNo">185</span>        }<a name="line.185"></a>
-<span class="sourceLineNo">186</span>      }<a name="line.186"></a>
-<span class="sourceLineNo">187</span>      admin.move(region.getEncodedNameAsBytes(), destServerName);<a name="line.187"></a>
-<span class="sourceLineNo">188</span>      LOG.debug("Finished moving user region");<a name="line.188"></a>
-<span class="sourceLineNo">189</span>      put = new Put(row);<a name="line.189"></a>
-<span class="sourceLineNo">190</span>      put.addColumn(family, qualifier, newVal);<a name="line.190"></a>
-<span class="sourceLineNo">191</span>      table.put(put);<a name="line.191"></a>
-<span class="sourceLineNo">192</span>      result = table.get(get);<a name="line.192"></a>
-<span class="sourceLineNo">193</span>      LOG.debug("Result: " + Bytes.toString(result.getValue(family, qualifier)));<a name="line.193"></a>
-<span class="sourceLineNo">194</span>      Assert.assertArrayEquals(newVal, result.getValue(family, qualifier));<a name="line.194"></a>
-<span class="sourceLineNo">195</span>    } finally {<a name="line.195"></a>
-<span class="sourceLineNo">196</span>      admin.close();<a name="line.196"></a>
-<span class="sourceLineNo">197</span>      table.close();<a name="line.197"></a>
-<span class="sourceLineNo">198</span>    }<a name="line.198"></a>
-<span class="sourceLineNo">199</span>  }<a name="line.199"></a>
-<span class="sourceLineNo">200</span><a name="line.200"></a>
-<span class="sourceLineNo">201</span>  @Test(timeout = 120000)<a name="line.201"></a>
-<span class="sourceLineNo">202</span>  public void testMetaMoveDuringClientZkClusterRestart() throws Exception {<a name="line.202"></a>
-<span class="sourceLineNo">203</span>    TableName tn = TableName.valueOf(name.getMethodName());<a name="line.203"></a>
-<span class="sourceLineNo">204</span>    // create table<a name="line.204"></a>
-<span class="sourceLineNo">205</span>    ClusterConnection conn = (ClusterConnection) TEST_UTIL.getConnection();<a name="line.205"></a>
-<span class="sourceLineNo">206</span>    Admin admin = conn.getAdmin();<a name="line.206"></a>
-<span class="sourceLineNo">207</span>    HTable table = (HTable) conn.getTable(tn);<a name="line.207"></a>
-<span class="sourceLineNo">208</span>    try {<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      ColumnFamilyDescriptorBuilder cfDescBuilder =<a name="line.209"></a>
-<span class="sourceLineNo">210</span>          ColumnFamilyDescriptorBuilder.newBuilder(family);<a name="line.210"></a>
-<span class="sourceLineNo">211</span>      TableDescriptorBuilder tableDescBuilder =<a name="line.211"></a>
-<span class="sourceLineNo">212</span>          TableDescriptorBuilder.newBuilder(tn).setColumnFamily(cfDescBuilder.build());<a name="line.212"></a>
-<span class="sourceLineNo">213</span>      admin.createTable(tableDescBuilder.build());<a name="line.213"></a>
-<span class="sourceLineNo">214</span>      // put some data<a name="line.214"></a>
-<span class="sourceLineNo">215</span>      Put put = new Put(row);<a name="line.215"></a>
-<span class="sourceLineNo">216</span>      put.addColumn(family, qualifier, value);<a name="line.216"></a>
-<span class="sourceLineNo">217</span>      table.put(put);<a name="line.217"></a>
-<span class="sourceLineNo">218</span>      // invalid connection cache<a name="line.218"></a>
-<span class="sourceLineNo">219</span>      conn.clearRegionCache();<a name="line.219"></a>
-<span class="sourceLineNo">220</span>      // stop client zk cluster<a name="line.220"></a>
-<span class="sourceLineNo">221</span>      clientZkCluster.shutdown();<a name="line.221"></a>
-<span class="sourceLineNo">222</span>      // stop current meta server and confirm the server shutdown process<a name="line.222"></a>
-<span class="sourceLineNo">223</span>      // is not affected by client ZK crash<a name="line.223"></a>
-<span class="sourceLineNo">224</span>      MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster();<a name="line.224"></a>
-<span class="sourceLineNo">225</span>      int metaServerId = cluster.getServerWithMeta();<a name="line.225"></a>
-<span class="sourceLineNo">226</span>      HRegionServer metaServer = cluster.getRegionServer(metaServerId);<a name="line.226"></a>
-<span class="sourceLineNo">227</span>      metaServer.stop("Stop current RS holding meta region");<a name="line.227"></a>
-<span class="sourceLineNo">228</span>      while (!metaServer.isShutDown()) {<a name="line.228"></a>
-<span class="sourceLineNo">229</span>        Thread.sleep(200);<a name="line.229"></a>
-<span class="sourceLineNo">230</span>      }<a name="line.230"></a>
-<span class="sourceLineNo">231</span>      // wait for meta region online<a name="line.231"></a>
-<span class="sourceLineNo">232</span>      try {<a name="line.232"></a>
-<span class="sourceLineNo">233</span>        cluster.getMaster().getAssignmentManager()<a name="line.233"></a>
-<span class="sourceLineNo">234</span>          .waitForAssignment(RegionInfoBuilder.FIRST_META_REGIONINFO);<a name="line.234"></a>
-<span class="sourceLineNo">235</span>      } catch (NoSuchProcedureException e) {<a name="line.235"></a>
-<span class="sourceLineNo">236</span>        // we don't need to take any further action<a name="line.236"></a>
-<span class="sourceLineNo">237</span>      }<a name="line.237"></a>
-<span class="sourceLineNo">238</span>      // wait some long time to make sure we will retry sync data to client ZK until data set<a name="line.238"></a>
-<span class="sourceLineNo">239</span>      Thread.sleep(10000);<a name="line.239"></a>
-<span class="sourceLineNo">240</span>      clientZkCluster.startup(clientZkDir);<a name="line.240"></a>
-<span class="sourceLineNo">241</span>      // new request should pass<a name="line.241"></a>
-<span class="sourceLineNo">242</span>      Get get = new Get(row);<a name="line.242"></a>
-<span class="sourceLineNo">243</span>      Result result = table.get(get);<a name="line.243"></a>
-<span class="sourceLineNo">244</span>      LOG.debug("Result: " + Bytes.toString(result.getValue(family, qualifier)));<a name="line.244"></a>
-<span class="sourceLineNo">245</span>      Assert.assertArrayEquals(value, result.getValue(family, qualifier));<a name="line.245"></a>
-<span class="sourceLineNo">246</span>    } finally {<a name="line.246"></a>
-<span class="sourceLineNo">247</span>      admin.close();<a name="line.247"></a>
-<span class="sourceLineNo">248</span>      table.close();<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    }<a name="line.249"></a>
-<span class="sourceLineNo">250</span>  }<a name="line.250"></a>
-<span class="sourceLineNo">251</span><a name="line.251"></a>
-<span class="sourceLineNo">252</span>  @Test(timeout = 60000)<a name="line.252"></a>
-<span class="sourceLineNo">253</span>  public void testAsyncTable() throws Exception {<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    TableName tn = TableName.valueOf(name.getMethodName());<a name="line.254"></a>
-<span class="sourceLineNo">255</span>    ColumnFamilyDescriptorBuilder cfDescBuilder = ColumnFamilyDescriptorBuilder.newBuilder(family);<a name="line.255"></a>
-<span class="sourceLineNo">256</span>    TableDescriptorBuilder tableDescBuilder =<a name="line.256"></a>
-<span class="sourceLineNo">257</span>        TableDescriptorBuilder.newBuilder(tn).setColumnFamily(cfDescBuilder.build());<a name="line.257"></a>
-<span class="sourceLineNo">258</span>    try (AsyncConnection ASYNC_CONN =<a name="line.258"></a>
-<span class="sourceLineNo">259</span>        ConnectionFactory.createAsyncConnection(TEST_UTIL.getConfiguration()).get()) {<a name="line.259"></a>
-<span class="sourceLineNo">260</span>      ASYNC_CONN.getAdmin().createTable(tableDescBuilder.build()).get();<a name="line.260"></a>
-<span class="sourceLineNo">261</span>      AsyncTable&lt;?&gt; table = ASYNC_CONN.getTable(tn);<a name="line.261"></a>
-<span class="sourceLineNo">262</span>      // put some data<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      Put put = new Put(row);<a name="line.263"></a>
-<span class="sourceLineNo">264</span>      put.addColumn(family, qualifier, value);<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      table.put(put).get();<a name="line.265"></a>
-<span class="sourceLineNo">266</span>      // get and verify<a name="line.266"></a>
-<span class="sourceLineNo">267</span>      Get get = new Get(row);<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      Result result = table.get(get).get();<a name="line.268"></a>
-<span class="sourceLineNo">269</span>      LOG.debug("Result: " + Bytes.toString(result.getValue(family, qualifier)));<a name="line.269"></a>
-<span class="sourceLineNo">270</span>      Assert.assertArrayEquals(value, result.getValue(family, qualifier));<a name="line.270"></a>
-<span class="sourceLineNo">271</span>    }<a name="line.271"></a>
-<span class="sourceLineNo">272</span>  }<a name="line.272"></a>
-<span class="sourceLineNo">273</span>}<a name="line.273"></a>
+<span class="sourceLineNo">023</span>import org.apache.hadoop.hbase.HBaseClassTestRule;<a name="line.23"></a>
+<span class="sourceLineNo">024</span>import org.apache.hadoop.hbase.HBaseTestingUtility;<a name="line.24"></a>
+<span class="sourceLineNo">025</span>import org.apache.hadoop.hbase.HConstants;<a name="line.25"></a>
+<span class="sourceLineNo">026</span>import org.apache.hadoop.hbase.MiniHBaseCluster;<a name="line.26"></a>
+<span class="sourceLineNo">027</span>import org.apache.hadoop.hbase.ServerName;<a name="line.27"></a>
+<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.TableName;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.master.HMaster;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.master.NoSuchProcedureException;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.regionserver.HRegionServer;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.testclassification.MediumTests;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.junit.AfterClass;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.junit.Assert;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.junit.BeforeClass;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.junit.ClassRule;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.junit.Rule;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.junit.Test;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.junit.experimental.categories.Category;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.junit.rules.TestName;<a name="line.43"></a>
+<span class="sourceLineNo">044</span><a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.slf4j.Logger;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.slf4j.LoggerFactory;<a name="line.46"></a>
+<span class="sourceLineNo">047</span><a name="line.47"></a>
+<span class="sourceLineNo">048</span>@Category(MediumTests.class)<a name="line.48"></a>
+<span class="sourceLineNo">049</span>public class TestSeparateClientZKCluster {<a name="line.49"></a>
+<span class="sourceLineNo">050</span>  private static final Logger LOG = LoggerFactory.getLogger(TestSeparateClientZKCluster.class);<a name="line.50"></a>
+<span class="sourceLineNo">051</span>  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();<a name="line.51"></a>
+<span class="sourceLineNo">052</span>  private static final File clientZkDir = new File("/tmp/TestSeparateClientZKCluster");<a name="line.52"></a>
+<span class="sourceLineNo">053</span>  private static final int ZK_SESSION_TIMEOUT = 5000;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>  private static MiniZooKeeperCluster clientZkCluster;<a name="line.54"></a>
+<span class="sourceLineNo">055</span><a name="line.55"></a>
+<span class="sourceLineNo">056</span>  private final byte[] family = Bytes.toBytes("cf");<a name="line.56"></a>
+<span class="sourceLineNo">057</span>  private final byte[] qualifier = Bytes.toBytes("c1");<a name="line.57"></a>
+<span class="sourceLineNo">058</span>  private final byte[] row = Bytes.toBytes("row");<a name="line.58"></a>
+<span class="sourceLineNo">059</span>  private final byte[] value = Bytes.toBytes("v1");<a name="line.59"></a>
+<span class="sourceLineNo">060</span>  private final byte[] newVal = Bytes.toBytes("v2");<a name="line.60"></a>
+<span class="sourceLineNo">061</span><a name="line.61"></a>
+<span class="sourceLineNo">062</span>  @Rule<a name="line.62"></a>
+<span class="sourceLineNo">063</span>  public TestName name = new TestName();<a name="line.63"></a>
+<span class="sourceLineNo">064</span><a name="line.64"></a>
+<span class="sourceLineNo">065</span>  @ClassRule<a name="line.65"></a>
+<span class="sourceLineNo">066</span>  public static final HBaseClassTestRule CLASS_RULE =<a name="line.66"></a>
+<span class="sourceLineNo">067</span>      HBaseClassTestRule.forClass(TestSeparateClientZKCluster.class);<a name="line.67"></a>
+<span class="sourceLineNo">068</span><a name="line.68"></a>
+<span class="sourceLineNo">069</span>  @BeforeClass<a name="line.69"></a>
+<span class="sourceLineNo">070</span>  public static void beforeAllTests() throws Exception {<a name="line.70"></a>
+<span class="sourceLineNo">071</span>    int clientZkPort = 21828;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>    clientZkCluster = new MiniZooKeeperCluster(TEST_UTIL.getConfiguration());<a name="line.72"></a>
+<span class="sourceLineNo">073</span>    clientZkCluster.setDefaultClientPort(clientZkPort);<a name="line.73"></a>
+<span class="sourceLineNo">074</span>    clientZkCluster.startup(clientZkDir);<a name="line.74"></a>
+<span class="sourceLineNo">075</span>    // reduce the retry number and start log counter<a name="line.75"></a>
+<span class="sourceLineNo">076</span>    TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 2);<a name="line.76"></a>
+<span class="sourceLineNo">077</span>    TEST_UTIL.getConfiguration().setInt("hbase.client.start.log.errors.counter", -1);<a name="line.77"></a>
+<span class="sourceLineNo">078</span>    TEST_UTIL.getConfiguration().setInt("zookeeper.recovery.retry", 1);<a name="line.78"></a>
+<span class="sourceLineNo">079</span>    // core settings for testing client ZK cluster<a name="line.79"></a>
+<span class="sourceLineNo">080</span>    TEST_UTIL.getConfiguration().set(HConstants.CLIENT_ZOOKEEPER_QUORUM, HConstants.LOCALHOST);<a name="line.80"></a>
+<span class="sourceLineNo">081</span>    TEST_UTIL.getConfiguration().setInt(HConstants.CLIENT_ZOOKEEPER_CLIENT_PORT, clientZkPort);<a name="line.81"></a>
+<span class="sourceLineNo">082</span>    // reduce zk session timeout to easier trigger session expiration<a name="line.82"></a>
+<span class="sourceLineNo">083</span>    TEST_UTIL.getConfiguration().setInt(HConstants.ZK_SESSION_TIMEOUT, ZK_SESSION_TIMEOUT);<a name="line.83"></a>
+<span class="sourceLineNo">084</span>    // Start a cluster with 2 masters and 3 regionservers.<a name="line.84"></a>
+<span class="sourceLineNo">085</span>    TEST_UTIL.startMiniCluster(2, 3);<a name="line.85"></a>
+<span class="sourceLineNo">086</span>  }<a name="line.86"></a>
+<span class="sourceLineNo">087</span><a name="line.87"></a>
+<span class="sourceLineNo">088</span>  @AfterClass<a name="line.88"></a>
+<span class="sourceLineNo">089</span>  public static void afterAllTests() throws Exception {<a name="line.89"></a>
+<span class="sourceLineNo">090</span>    TEST_UTIL.shutdownMiniCluster();<a name="line.90"></a>
+<span class="sourceLineNo">091</span>    clientZkCluster.shutdown();<a name="line.91"></a>
+<span class="sourceLineNo">092</span>    FileUtils.deleteDirectory(clientZkDir);<a name="line.92"></a>
+<span class="sourceLineNo">093</span>  }<a name="line.93"></a>
+<span class="sourceLineNo">094</span><a name="line.94"></a>
+<span class="sourceLineNo">095</span>  @Test(timeout = 60000)<a name="line.95"></a>
+<span class="sourceLineNo">096</span>  public void testBasicOperation() throws Exception {<a name="line.96"></a>
+<span class="sourceLineNo">097</span>    TableName tn = TableName.valueOf(name.getMethodName());<a name="line.97"></a>
+<span class="sourceLineNo">098</span>    // create table<a name="line.98"></a>
+<span class="sourceLineNo">099</span>    Connection conn = TEST_UTIL.getConnection();<a name="line.99"></a>
+<span class="sourceLineNo">100</span>    Admin admin = conn.getAdmin();<a name="line.100"></a>
+<span class="sourceLineNo">101</span>    HTable table = (HTable) conn.getTable(tn);<a name="line.101"></a>
+<span class="sourceLineNo">102</span>    try {<a name="line.102"></a>
+<span class="sourceLineNo">103</span>      ColumnFamilyDescriptorBuilder cfDescBuilder =<a name="line.103"></a>
+<span class="sourceLineNo">104</span>          ColumnFamilyDescriptorBuilder.newBuilder(family);<a name="line.104"></a>
+<span class="sourceLineNo">105</span>      TableDescriptorBuilder tableDescBuilder =<a name="line.105"></a>
+<span class="sourceLineNo">106</span>          TableDescriptorBuilder.newBuilder(tn).setColumnFamily(cfDescBuilder.build());<a name="line.106"></a>
+<span class="sourceLineNo">107</span>      admin.createTable(tableDescBuilder.build());<a name="line.107"></a>
+<span class="sourceLineNo">108</span>      // test simple get and put<a name="line.108"></a>
+<span class="sourceLineNo">109</span>      Put put = new Put(row);<a name="line.109"></a>
+<span class="sourceLineNo">110</span>      put.addColumn(family, qualifier, value);<a name="line.110"></a>
+<span class="sourceLineNo">111</span>      table.put(put);<a name="line.111"></a>
+<span class="sourceLineNo">112</span>      Get get = new Get(row);<a name="line.112"></a>
+<span class="sourceLineNo">113</span>      Result result = table.get(get);<a name="line.113"></a>
+<span class="sourceLineNo">114</span>      LOG.debug("Result: " + Bytes.toString(result.getValue(family, qualifier)));<a name="line.114"></a>
+<span class="sourceLineNo">115</span>      Assert.assertArrayEquals(value, result.getValue(family, qualifier));<a name="line.115"></a>
+<span class="sourceLineNo">116</span>    } finally {<a name="line.116"></a>
+<span class="sourceLineNo">117</span>      admin.close();<a name="line.117"></a>
+<span class="sourceLineNo">118</span>      table.close();<a name="line.118"></a>
+<span class="sourceLineNo">119</span>    }<a name="line.119"></a>
+<span class="sourceLineNo">120</span>  }<a name="line.120"></a>
+<span class="sourceLineNo">121</span><a name="line.121"></a>
+<span class="sourceLineNo">122</span>  @Test(timeout = 60000)<a name="line.122"></a>
+<span class="sourceLineNo">123</span>  public void testMasterSwitch() throws Exception {<a name="line.123"></a>
+<span class="sourceLineNo">124</span>    // get an admin instance and issue some request first<a name="line.124"></a>
+<span class="sourceLineNo">125</span>    Connection conn = TEST_UTIL.getConnection();<a name="line.125"></a>
+<span class="sourceLineNo">126</span>    Admin admin = conn.getAdmin();<a name="line.126"></a>
+<span class="sourceLineNo">127</span>    LOG.debug("Tables: " + admin.listTableDescriptors());<a name="line.127"></a>
+<span class="sourceLineNo">128</span>    try {<a name="line.128"></a>
+<span class="sourceLineNo">129</span>      MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster();<a name="line.129"></a>
+<span class="sourceLineNo">130</span>      // switch active master<a name="line.130"></a>
+<span class="sourceLineNo">131</span>      HMaster master = cluster.getMaster();<a name="line.131"></a>
+<span class="sourceLineNo">132</span>      master.stopMaster();<a name="line.132"></a>
+<span class="sourceLineNo">133</span>      while (!master.isShutDown()) {<a name="line.133"></a>
+<span class="sourceLineNo">134</span>        Thread.sleep(200);<a name="line.134"></a>
+<span class="sourceLineNo">135</span>      }<a name="line.135"></a>
+<span class="sourceLineNo">136</span>      while (cluster.getMaster() == null || !cluster.getMaster().isInitialized()) {<a name="line.136"></a>
+<span class="sourceLineNo">137</span>        Thread.sleep(200);<a name="line.137"></a>
+<span class="sourceLineNo">138</span>      }<a name="line.138"></a>
+<span class="sourceLineNo">139</span>      // confirm client access still works<a name="line.139"></a>
+<span class="sourceLineNo">140</span>      Assert.assertTrue(admin.balance(false));<a name="line.140"></a>
+<span class="sourceLineNo">141</span>    } finally {<a name="line.141"></a>
+<span class="sourceLineNo">142</span>      admin.close();<a name="line.142"></a>
+<span class="sourceLineNo">143</span>    }<a name="line.143"></a>
+<span class="sourceLineNo">144</span>  }<a name="line.144"></a>
+<span class="sourceLineNo">145</span><a name="line.145"></a>
+<span class="sourceLineNo">146</span>  @Test(timeout = 60000)<a name="line.146"></a>
+<span class="sourceLineNo">147</span>  public void testMetaRegionMove() throws Exception {<a name="line.147"></a>
+<span class="sourceLineNo">148</span>    TableName tn = TableName.valueOf(name.getMethodName());<a name="line.148"></a>
+<span class="sourceLineNo">149</span>    // create table<a name="line.149"></a>
+<span class="sourceLineNo">150</span>    Connection conn = TEST_UTIL.getConnection();<a name="line.150"></a>
+<span class="sourceLineNo">151</span>    Admin admin = conn.getAdmin();<a name="line.151"></a>
+<span class="sourceLineNo">152</span>    HTable table = (HTable) conn.getTable(tn);<a name="line.152"></a>
+<span class="sourceLineNo">153</span>    try {<a name="line.153"></a>
+<span class="sourceLineNo">154</span>      MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster();<a name="line.154"></a>
+<span class="sourceLineNo">155</span>      ColumnFamilyDescriptorBuilder cfDescBuilder =<a name="line.155"></a>
+<span class="sourceLineNo">156</span>          ColumnFamilyDescriptorBuilder.newBuilder(family);<a name="line.156"></a>
+<span class="sourceLineNo">157</span>      TableDescriptorBuilder tableDescBuilder =<a name="line.157"></a>
+<span class="sourceLineNo">158</span>          TableDescriptorBuilder.newBuilder(tn).setColumnFamily(cfDescBuilder.build());<a name="line.158"></a>
+<span class="sourceLineNo">159</span>      admin.createTable(tableDescBuilder.build());<a name="line.159"></a>
+<span class="sourceLineNo">160</span>      // issue some requests to cache the region location<a name="line.160"></a>
+<span class="sourceLineNo">161</span>      Put put = new Put(row);<a name="line.161"></a>
+<span class="sourceLineNo">162</span>      put.addColumn(family, qualifier, value);<a name="line.162"></a>
+<span class="sourceLineNo">163</span>      table.put(put);<a name="line.163"></a>
+<span class="sourceLineNo">164</span>      Get get = new Get(row);<a name="line.164"></a>
+<span class="sourceLineNo">165</span>      Result result = table.get(get);<a name="line.165"></a>
+<span class="sourceLineNo">166</span>      // move meta region and confirm client could detect<a name="line.166"></a>
+<span class="sourceLineNo">167</span>      byte[] destServerName = null;<a name="line.167"></a>
+<span class="sourceLineNo">168</span>      for (RegionServerThread rst : cluster.getLiveRegionServerThreads()) {<a name="line.168"></a>
+<span class="sourceLineNo">169</span>        ServerName name = rst.getRegionServer().getServerName();<a name="line.169"></a>
+<span class="sourceLineNo">170</span>        if (!name.equals(cluster.getServerHoldingMeta())) {<a name="line.170"></a>
+<span class="sourceLineNo">171</span>          destServerName = Bytes.toBytes(name.getServerName());<a name="line.171"></a>
+<span class="sourceLineNo">172</span>          break;<a name="line.172"></a>
+<span class="sourceLineNo">173</span>        }<a name="line.173"></a>
+<span class="sourceLineNo">174</span>      }<a name="line.174"></a>
+<span class="sourceLineNo">175</span>      admin.move(RegionInfoBuilder.FIRST_META_REGIONINFO.getEncodedNameAsBytes(), destServerName);<a name="line.175"></a>
+<span class="sourceLineNo">176</span>      LOG.debug("Finished moving meta");<a name="line.176"></a>
+<span class="sourceLineNo">177</span>      // invalidate client cache<a name="line.177"></a>
+<span class="sourceLineNo">178</span>      RegionInfo region =<a name="line.178"></a>
+<span class="sourceLineNo">179</span>          table.getRegionLocator().getRegionLocation(row).getRegion();<a name="line.179"></a>
+<span class="sourceLineNo">180</span>      ServerName currentServer = cluster.getServerHoldingRegion(tn, region.getRegionName());<a name="line.180"></a>
+<span class="sourceLineNo">181</span>      for (RegionServerThread rst : cluster.getLiveRegionServerThreads()) {<a name="line.181"></a>
+<span class="sourceLineNo">182</span>        ServerName name = rst.getRegionServer().getServerName();<a name="line.182"></a>
+<span class="sourceLineNo">183</span>        if (!name.equals(currentServer)) {<a name="line.183"></a>
+<span class="sourceLineNo">184</span>          destServerName = Bytes.toBytes(name.getServerName());<a name="line.184"></a>
+<span class="sourceLineNo">185</span>          break;<a name="line.185"></a>
+<span class="sourceLineNo">186</span>        }<a name="line.186"></a>
+<span class="sourceLineNo">187</span>      }<a name="line.187"></a>
+<span class="sourceLineNo">188</span>      admin.move(region.getEncodedNameAsBytes(), destServerName);<a name="line.188"></a>
+<span class="sourceLineNo">189</span>      LOG.debug("Finished moving user region");<a name="line.189"></a>
+<span class="sourceLineNo">190</span>      put = new Put(row);<a name="line.190"></a>
+<span class="sourceLineNo">191</span>      put.addColumn(family, qualifier, newVal);<a name="line.191"></a>
+<span class="sourceLineNo">192</span>      table.put(put);<a name="line.192"></a>
+<span class="sourceLineNo">193</span>      result = table.get(get);<a name="line.193"></a>
+<span class="sourceLineNo">194</span>      LOG.debug("Result: " + Bytes.toString(result.getValue(family, qualifier)));<a name="line.194"></a>
+<span class="sourceLineNo">195</span>      Assert.assertArrayEquals(newVal, result.getValue(family, qualifier));<a name="line.195"></a>
+<span class="sourceLineNo">196</span>    } finally {<a name="line.196"></a>
+<span class="sourceLineNo">197</span>      admin.close();<a name="line.197"></a>
+<span class="sourceLineNo">198</span>      table.close();<a name="line.198"></a>
+<span class="sourceLineNo">199</span>    }<a name="line.199"></a>
+<span class="sourceLineNo">200</span>  }<a name="line.200"></a>
+<span class="sourceLineNo">201</span><a name="line.201"></a>
+<span class="sourceLineNo">202</span>  @Test(timeout = 120000)<a name="line.202"></a>
+<span class="sourceLineNo">203</span>  public void testMetaMoveDuringClientZkClusterRestart() throws Exception {<a name="line.203"></a>
+<span class="sourceLineNo">204</span>    TableName tn = TableName.valueOf(name.getMethodName());<a name="line.204"></a>
+<span class="sourceLineNo">205</span>    // create table<a name="line.205"></a>
+<span class="sourceLineNo">206</span>    ClusterConnection conn = (ClusterConnection) TEST_UTIL.getConnection();<a name="line.206"></a>
+<span class="sourceLineNo">207</span>    Admin admin = conn.getAdmin();<a name="line.207"></a>
+<span class="sourceLineNo">208</span>    HTable table = (HTable) conn.getTable(tn);<a name="line.208"></a>
+<span class="sourceLineNo">209</span>    try {<a name="line.209"></a>
+<span class="sourceLineNo">210</span>      ColumnFamilyDescriptorBuilder cfDescBuilder =<a name="line.210"></a>
+<span class="sourceLineNo">211</span>          ColumnFamilyDescriptorBuilder.newBuilder(family);<a name="line.211"></a>
+<span class="sourceLineNo">212</span>      TableDescriptorBuilder tableDescBuilder =<a name="line.212"></a>
+<span class="sourceLineNo">213</span>          TableDescriptorBuilder.newBuilder(tn).setColumnFamily(cfDescBuilder.build());<a name="line.213"></a>
+<span class="sourceLineNo">214</span>      admin.createTable(tableDescBuilder.build());<a name="line.214"></a>
+<span class="sourceLineNo">215</span>      // put some data<a name="line.215"></a>
+<span class="sourceLineNo">216</span>      Put put = new Put(row);<a name="line.216"></a>
+<span class="sourceLineNo">217</span>      put.addColumn(family, qualifier, value);<a name="line.217"></a>
+<span class="sourceLineNo">218</span>      table.put(put);<a name="line.218"></a>
+<span class="sourceLineNo">219</span>      // invalid connection cache<a name="line.219"></a>
+<span class="sourceLineNo">220</span>      conn.clearRegionCache();<a name="line.220"></a>
+<span class="sourceLineNo">221</span>      // stop client zk cluster<a name="line.221"></a>
+<span class="sourceLineNo">222</span>      clientZkCluster.shutdown();<a name="line.222"></a>
+<span class="sourceLineNo">223</span>      // stop current meta server and confirm the server shutdown process<a name="line.223"></a>
+<span class="sourceLineNo">224</span>      // is not affected by client ZK crash<a name="line.224"></a>
+<span class="sourceLineNo">225</span>      MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster();<a name="line.225"></a>
+<span class="sourceLineNo">226</span>      int metaServerId = cluster.getServerWithMeta();<a name="line.226"></a>
+<span class="sourceLineNo">227</span>      HRegionServer metaServer = cluster.getRegionServer(metaServerId);<a name="line.227"></a>
+<span class="sourceLineNo">228</span>      metaServer.stop("Stop current RS holding meta region");<a name="line.228"></a>
+<span class="sourceLineNo">229</span>      while (!metaServer.isShutDown()) {<a name="line.229"></a>
+<span class="sourceLineNo">230</span>        Thread.sleep(200);<a name="line.230"></a>
+<span class="sourceLineNo">231</span>      }<a name="line.231"></a>
+<span class="sourceLineNo">232</span>      // wait for meta region online<a name="line.232"></a>
+<span class="sourceLineNo">233</span>      try {<a name="line.233"></a>
+<span class="sourceLineNo">234</span>        cluster.getMaster().getAssignmentManager()<a name="line.234"></a>
+<span class="sourceLineNo">235</span>          .waitForAssignment(RegionInfoBuilder.FIRST_META_REGIONINFO);<a name="line.235"></a>
+<span class="sourceLineNo">236</span>      } catch (NoSuchProcedureException e) {<a name="line.236"></a>
+<span class="sourceLineNo">237</span>        // we don't need to take any further action<a name="line.237"></a>
+<span class="sourceLineNo">238</span>      }<a name="line.238"></a>
+<span class="sourceLineNo">239</span>      // wait some long time to make sure we will retry sync data to client ZK until data set<a name="line.239"></a>
+<span class="sourceLineNo">240</span>      Thread.sleep(10000);<a name="line.240"></a>
+<span class="sourceLineNo">241</span>      clientZkCluster.startup(clientZkDir);<a name="line.241"></a>
+<span class="sourceLineNo">242</span>      // new request should pass<a name="line.242"></a>
+<span class="sourceLineNo">243</span>      Get get = new Get(row);<a name="line.243"></a>
+<span class="sourceLineNo">244</span>      Result result = table.get(get);<a name="line.244"></a>
+<span class="sourceLineNo">245</span>      LOG.debug("Result: " + Bytes.toString(result.getValue(family, qualifier)));<a name="line.245"></a>
+<span class="sourceLineNo">246</span>      Assert.assertArrayEquals(value, result.getValue(family, qualifier));<a name="line.246"></a>
+<span class="sourceLineNo">247</span>    } finally {<a name="line.247"></a>
+<span class="sourceLineNo">248</span>      admin.close();<a name="line.248"></a>
+<span class="sourceLineNo">249</span>      table.close();<a name="line.249"></a>
+<span class="sourceLineNo">250</span>    }<a name="line.250"></a>
+<span class="sourceLineNo">251</span>  }<a name="line.251"></a>
+<span class="sourceLineNo">252</span><a name="line.252"></a>
+<span class="sourceLineNo">253</span>  @Test(timeout = 60000)<a name="line.253"></a>
+<span class="sourceLineNo">254</span>  public void testAsyncTable() throws Exception {<a name="line.254"></a>
+<span class="sourceLineNo">255</span>    TableName tn = TableName.valueOf(name.getMethodName());<a name="line.255"></a>
+<span class="sourceLineNo">256</span>    ColumnFamilyDescriptorBuilder cfDescBuilder = ColumnFamilyDescriptorBuilder.newBuilder(family);<a name="line.256"></a>
+<span class="sourceLineNo">257</span>    TableDescriptorBuilder tableDescBuilder =<a name="line.257"></a>
+<span class="sourceLineNo">258</span>        TableDescriptorBuilder.newBuilder(tn).setColumnFamily(cfDescBuilder.build());<a name="line.258"></a>
+<span class="sourceLineNo">259</span>    try (AsyncConnection ASYNC_CONN =<a name="line.259"></a>
+<span class="sourceLineNo">260</span>        ConnectionFactory.createAsyncConnection(TEST_UTIL.getConfiguration()).get()) {<a name="line.260"></a>
+<span class="sourceLineNo">261</span>      ASYNC_CONN.getAdmin().createTable(tableDescBuilder.build()).get();<a name="line.261"></a>
+<span class="sourceLineNo">262</span>      AsyncTable&lt;?&gt; table = ASYNC_CONN.getTable(tn);<a name="line.262"></a>
+<span class="sourceLineNo">263</span>      // put some data<a name="line.263"></a>
+<span class="sourceLineNo">264</span>      Put put = new Put(row);<a name="line.264"></a>
+<span class="sourceLineNo">265</span>      put.addColumn(family, qualifier, value);<a name="line.265"></a>
+<span class="sourceLineNo">266</span>      table.put(put).get();<a name="line.266"></a>
+<span class="sourceLineNo">267</span>      // get and verify<a name="line.267"></a>
+<span class="sourceLineNo">268</span>      Get get = new Get(row);<a name="line.268"></a>
+<span class="sourceLineNo">269</span>      Result result = table.get(get).get();<a name="line.269"></a>
+<span class="sourceLineNo">270</span>      LOG.debug("Result: " + Bytes.toString(result.getValue(family, qualifier)));<a name="line.270"></a>
+<span class="sourceLineNo">271</span>      Assert.assertArrayEquals(value, result.getValue(family, qualifier));<a name="line.271"></a>
+<span class="sourceLineNo">272</span>    }<a name="line.272"></a>
+<span class="sourceLineNo">273</span>  }<a name="line.273"></a>
+<span class="sourceLineNo">274</span>}<a name="line.274"></a>
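
The test above exercises the separate client ZooKeeper ensemble: beforeAllTests points HConstants.CLIENT_ZOOKEEPER_QUORUM and HConstants.CLIENT_ZOOKEEPER_CLIENT_PORT at a standalone MiniZooKeeperCluster, then verifies puts and gets, a master switch, meta region moves, and a client-ZK restart against it. Outside the test harness, client-side wiring would look roughly like the sketch below; the host name is a placeholder and only the two HConstants keys and the 21828 port are taken from the test:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HConstants;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;

    public class ClientZkQuorumSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // Point the client at the dedicated client-facing ZK ensemble
        // rather than the server-side quorum. Placeholder host below.
        conf.set(HConstants.CLIENT_ZOOKEEPER_QUORUM, "client-zk.example.org");
        conf.setInt(HConstants.CLIENT_ZOOKEEPER_CLIENT_PORT, 21828);
        try (Connection conn = ConnectionFactory.createConnection(conf)) {
          System.out.println("Connected via client ZK: "
              + conn.getConfiguration().get(HConstants.CLIENT_ZOOKEEPER_QUORUM));
        }
      }
    }

The design the tests probe is that the masters keep the client ensemble up to date (via the ClientZKSyncer shown in part 20/27 below), so client traffic need not reach the server-side quorum.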
 
 
 


[20/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

Posted by gi...@apache.org.
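
The ClientZKSyncer diff that follows shows how watched znodes reach the client ensemble: one daemon ClientZkUpdater thread per znode drains a capacity-1 BlockingQueue, and upsertQueue replaces any pending element so only the newest value is ever synced. A self-contained sketch of that coalescing-queue idiom, with class and method names of my own rather than HBase's:

    import java.nio.charset.StandardCharsets;
    import java.util.concurrent.ArrayBlockingQueue;
    import java.util.concurrent.BlockingQueue;

    public class LatestValueQueueSketch {
      // Capacity-1 queue that coalesces updates: a writer always replaces
      // any pending element, so the consumer only ever sees the newest value.
      private final BlockingQueue<byte[]> queue = new ArrayBlockingQueue<>(1);

      void upsert(byte[] data) {
        synchronized (queue) {
          queue.poll();      // drop the stale pending value, if any
          queue.offer(data); // always succeeds: capacity 1, queue now empty
        }
      }

      byte[] awaitNext() throws InterruptedException {
        return queue.take(); // consumer blocks until a value is available
      }

      public static void main(String[] args) throws Exception {
        LatestValueQueueSketch s = new LatestValueQueueSketch();
        s.upsert("v1".getBytes(StandardCharsets.UTF_8));
        s.upsert("v2".getBytes(StandardCharsets.UTF_8)); // replaces v1
        System.out.println(new String(s.awaitNext(), StandardCharsets.UTF_8)); // prints v2
      }
    }

Because the poll-then-offer pair runs under the queue's monitor, concurrent writers cannot interleave, while the consumer simply blocks in take() until a value arrives.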
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.ClientZkUpdater.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.ClientZkUpdater.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.ClientZkUpdater.html
index 3e25d25..87545d3 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.ClientZkUpdater.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.ClientZkUpdater.html
@@ -33,220 +33,221 @@
 <span class="sourceLineNo">025</span>import java.util.concurrent.ArrayBlockingQueue;<a name="line.25"></a>
 <span class="sourceLineNo">026</span>import java.util.concurrent.BlockingQueue;<a name="line.26"></a>
 <span class="sourceLineNo">027</span><a name="line.27"></a>
-<span class="sourceLineNo">028</span>import org.apache.commons.logging.Log;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.commons.logging.LogFactory;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.HConstants;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.Server;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.zookeeper.ZKListener;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.zookeeper.CreateMode;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.zookeeper.KeeperException;<a name="line.38"></a>
-<span class="sourceLineNo">039</span><a name="line.39"></a>
-<span class="sourceLineNo">040</span>/**<a name="line.40"></a>
-<span class="sourceLineNo">041</span> * Tracks the target znode(s) on server ZK cluster and synchronize them to client ZK cluster if<a name="line.41"></a>
-<span class="sourceLineNo">042</span> * changed<a name="line.42"></a>
-<span class="sourceLineNo">043</span> * &lt;p/&gt;<a name="line.43"></a>
-<span class="sourceLineNo">044</span> * The target znode(s) is given through {@link #getNodesToWatch()} method<a name="line.44"></a>
-<span class="sourceLineNo">045</span> */<a name="line.45"></a>
-<span class="sourceLineNo">046</span>@InterfaceAudience.Private<a name="line.46"></a>
-<span class="sourceLineNo">047</span>public abstract class ClientZKSyncer extends ZKListener {<a name="line.47"></a>
-<span class="sourceLineNo">048</span>  private static final Log LOG = LogFactory.getLog(ClientZKSyncer.class);<a name="line.48"></a>
-<span class="sourceLineNo">049</span>  private final Server server;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>  private final ZKWatcher clientZkWatcher;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>  // We use queues and daemon threads to synchronize the data to client ZK cluster<a name="line.51"></a>
-<span class="sourceLineNo">052</span>  // to avoid blocking the single event thread for watchers<a name="line.52"></a>
-<span class="sourceLineNo">053</span>  private final Map&lt;String, BlockingQueue&lt;byte[]&gt;&gt; queues;<a name="line.53"></a>
-<span class="sourceLineNo">054</span><a name="line.54"></a>
-<span class="sourceLineNo">055</span>  public ClientZKSyncer(ZKWatcher watcher, ZKWatcher clientZkWatcher, Server server) {<a name="line.55"></a>
-<span class="sourceLineNo">056</span>    super(watcher);<a name="line.56"></a>
-<span class="sourceLineNo">057</span>    this.server = server;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>    this.clientZkWatcher = clientZkWatcher;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>    this.queues = new HashMap&lt;&gt;();<a name="line.59"></a>
-<span class="sourceLineNo">060</span>  }<a name="line.60"></a>
-<span class="sourceLineNo">061</span><a name="line.61"></a>
-<span class="sourceLineNo">062</span>  /**<a name="line.62"></a>
-<span class="sourceLineNo">063</span>   * Starts the syncer<a name="line.63"></a>
-<span class="sourceLineNo">064</span>   * @throws KeeperException if error occurs when trying to create base nodes on client ZK<a name="line.64"></a>
-<span class="sourceLineNo">065</span>   */<a name="line.65"></a>
-<span class="sourceLineNo">066</span>  public void start() throws KeeperException {<a name="line.66"></a>
-<span class="sourceLineNo">067</span>    LOG.debug("Starting " + getClass().getSimpleName());<a name="line.67"></a>
-<span class="sourceLineNo">068</span>    this.watcher.registerListener(this);<a name="line.68"></a>
-<span class="sourceLineNo">069</span>    // create base znode on remote ZK<a name="line.69"></a>
-<span class="sourceLineNo">070</span>    ZKUtil.createWithParents(clientZkWatcher, watcher.znodePaths.baseZNode);<a name="line.70"></a>
-<span class="sourceLineNo">071</span>    // set meta znodes for client ZK<a name="line.71"></a>
-<span class="sourceLineNo">072</span>    Collection&lt;String&gt; nodes = getNodesToWatch();<a name="line.72"></a>
-<span class="sourceLineNo">073</span>    LOG.debug("Znodes to watch: " + nodes);<a name="line.73"></a>
-<span class="sourceLineNo">074</span>    // initialize queues and threads<a name="line.74"></a>
-<span class="sourceLineNo">075</span>    for (String node : nodes) {<a name="line.75"></a>
-<span class="sourceLineNo">076</span>      BlockingQueue&lt;byte[]&gt; queue = new ArrayBlockingQueue&lt;&gt;(1);<a name="line.76"></a>
-<span class="sourceLineNo">077</span>      queues.put(node, queue);<a name="line.77"></a>
-<span class="sourceLineNo">078</span>      Thread updater = new ClientZkUpdater(node, queue);<a name="line.78"></a>
-<span class="sourceLineNo">079</span>      updater.setDaemon(true);<a name="line.79"></a>
-<span class="sourceLineNo">080</span>      updater.start();<a name="line.80"></a>
-<span class="sourceLineNo">081</span>      watchAndCheckExists(node);<a name="line.81"></a>
-<span class="sourceLineNo">082</span>    }<a name="line.82"></a>
-<span class="sourceLineNo">083</span>  }<a name="line.83"></a>
-<span class="sourceLineNo">084</span><a name="line.84"></a>
-<span class="sourceLineNo">085</span>  private void watchAndCheckExists(String node) {<a name="line.85"></a>
-<span class="sourceLineNo">086</span>    try {<a name="line.86"></a>
-<span class="sourceLineNo">087</span>      if (ZKUtil.watchAndCheckExists(watcher, node)) {<a name="line.87"></a>
-<span class="sourceLineNo">088</span>        byte[] data = ZKUtil.getDataAndWatch(watcher, node);<a name="line.88"></a>
-<span class="sourceLineNo">089</span>        if (data != null) {<a name="line.89"></a>
-<span class="sourceLineNo">090</span>          // put the data into queue<a name="line.90"></a>
-<span class="sourceLineNo">091</span>          upsertQueue(node, data);<a name="line.91"></a>
-<span class="sourceLineNo">092</span>        } else {<a name="line.92"></a>
-<span class="sourceLineNo">093</span>          // It existed but now does not, should has been tracked by our watcher, ignore<a name="line.93"></a>
-<span class="sourceLineNo">094</span>          LOG.debug("Found no data from " + node);<a name="line.94"></a>
-<span class="sourceLineNo">095</span>          watchAndCheckExists(node);<a name="line.95"></a>
-<span class="sourceLineNo">096</span>        }<a name="line.96"></a>
-<span class="sourceLineNo">097</span>      } else {<a name="line.97"></a>
-<span class="sourceLineNo">098</span>        // cleanup stale ZNodes on client ZK to avoid invalid requests to server<a name="line.98"></a>
-<span class="sourceLineNo">099</span>        ZKUtil.deleteNodeFailSilent(clientZkWatcher, node);<a name="line.99"></a>
-<span class="sourceLineNo">100</span>      }<a name="line.100"></a>
-<span class="sourceLineNo">101</span>    } catch (KeeperException e) {<a name="line.101"></a>
-<span class="sourceLineNo">102</span>      server.abort("Unexpected exception during initialization, aborting", e);<a name="line.102"></a>
-<span class="sourceLineNo">103</span>    }<a name="line.103"></a>
-<span class="sourceLineNo">104</span>  }<a name="line.104"></a>
-<span class="sourceLineNo">105</span><a name="line.105"></a>
-<span class="sourceLineNo">106</span>  /**<a name="line.106"></a>
-<span class="sourceLineNo">107</span>   * Update the value of the single element in queue if any, or else insert.<a name="line.107"></a>
-<span class="sourceLineNo">108</span>   * &lt;p/&gt;<a name="line.108"></a>
-<span class="sourceLineNo">109</span>   * We only need to synchronize the latest znode value to client ZK rather than synchronize each<a name="line.109"></a>
-<span class="sourceLineNo">110</span>   * time<a name="line.110"></a>
-<span class="sourceLineNo">111</span>   * @param data the data to write to queue<a name="line.111"></a>
-<span class="sourceLineNo">112</span>   */<a name="line.112"></a>
-<span class="sourceLineNo">113</span>  private void upsertQueue(String node, byte[] data) {<a name="line.113"></a>
-<span class="sourceLineNo">114</span>    BlockingQueue&lt;byte[]&gt; queue = queues.get(node);<a name="line.114"></a>
-<span class="sourceLineNo">115</span>    synchronized (queue) {<a name="line.115"></a>
-<span class="sourceLineNo">116</span>      queue.poll();<a name="line.116"></a>
-<span class="sourceLineNo">117</span>      queue.offer(data);<a name="line.117"></a>
-<span class="sourceLineNo">118</span>    }<a name="line.118"></a>
-<span class="sourceLineNo">119</span>  }<a name="line.119"></a>
-<span class="sourceLineNo">120</span><a name="line.120"></a>
-<span class="sourceLineNo">121</span>  /**<a name="line.121"></a>
-<span class="sourceLineNo">122</span>   * Set data for client ZK and retry until succeed. Be very careful to prevent dead loop when<a name="line.122"></a>
-<span class="sourceLineNo">123</span>   * modifying this method<a name="line.123"></a>
-<span class="sourceLineNo">124</span>   * @param node the znode to set on client ZK<a name="line.124"></a>
-<span class="sourceLineNo">125</span>   * @param data the data to set to client ZK<a name="line.125"></a>
-<span class="sourceLineNo">126</span>   * @throws InterruptedException if the thread is interrupted during process<a name="line.126"></a>
-<span class="sourceLineNo">127</span>   */<a name="line.127"></a>
-<span class="sourceLineNo">128</span>  private final void setDataForClientZkUntilSuccess(String node, byte[] data)<a name="line.128"></a>
-<span class="sourceLineNo">129</span>      throws InterruptedException {<a name="line.129"></a>
-<span class="sourceLineNo">130</span>    while (!server.isStopped()) {<a name="line.130"></a>
-<span class="sourceLineNo">131</span>      try {<a name="line.131"></a>
-<span class="sourceLineNo">132</span>        LOG.debug("Set data for remote " + node + ", client zk wather: " + clientZkWatcher);<a name="line.132"></a>
-<span class="sourceLineNo">133</span>        ZKUtil.setData(clientZkWatcher, node, data);<a name="line.133"></a>
-<span class="sourceLineNo">134</span>        break;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>      } catch (KeeperException.NoNodeException nne) {<a name="line.135"></a>
-<span class="sourceLineNo">136</span>        // Node doesn't exist, create it and set value<a name="line.136"></a>
-<span class="sourceLineNo">137</span>        try {<a name="line.137"></a>
-<span class="sourceLineNo">138</span>          ZKUtil.createNodeIfNotExistsNoWatch(clientZkWatcher, node, data, CreateMode.PERSISTENT);<a name="line.138"></a>
-<span class="sourceLineNo">139</span>          break;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>        } catch (KeeperException.ConnectionLossException<a name="line.140"></a>
-<span class="sourceLineNo">141</span>            | KeeperException.SessionExpiredException ee) {<a name="line.141"></a>
-<span class="sourceLineNo">142</span>          reconnectAfterExpiration();<a name="line.142"></a>
-<span class="sourceLineNo">143</span>        } catch (KeeperException e) {<a name="line.143"></a>
-<span class="sourceLineNo">144</span>          LOG.warn(<a name="line.144"></a>
-<span class="sourceLineNo">145</span>            "Failed to create znode " + node + " due to: " + e.getMessage() + ", will retry later");<a name="line.145"></a>
-<span class="sourceLineNo">146</span>        }<a name="line.146"></a>
-<span class="sourceLineNo">147</span>      } catch (KeeperException.ConnectionLossException<a name="line.147"></a>
-<span class="sourceLineNo">148</span>          | KeeperException.SessionExpiredException ee) {<a name="line.148"></a>
-<span class="sourceLineNo">149</span>        reconnectAfterExpiration();<a name="line.149"></a>
-<span class="sourceLineNo">150</span>      } catch (KeeperException e) {<a name="line.150"></a>
-<span class="sourceLineNo">151</span>        LOG.debug("Failed to set data to client ZK, will retry later", e);<a name="line.151"></a>
-<span class="sourceLineNo">152</span>      }<a name="line.152"></a>
-<span class="sourceLineNo">153</span>      Threads.sleep(HConstants.SOCKET_RETRY_WAIT_MS);<a name="line.153"></a>
-<span class="sourceLineNo">154</span>    }<a name="line.154"></a>
-<span class="sourceLineNo">155</span>  }<a name="line.155"></a>
-<span class="sourceLineNo">156</span><a name="line.156"></a>
-<span class="sourceLineNo">157</span>  private final void reconnectAfterExpiration() throws InterruptedException {<a name="line.157"></a>
-<span class="sourceLineNo">158</span>    LOG.warn("ZK session expired or lost. Retry a new connection...");<a name="line.158"></a>
-<span class="sourceLineNo">159</span>    try {<a name="line.159"></a>
-<span class="sourceLineNo">160</span>      clientZkWatcher.reconnectAfterExpiration();<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    } catch (IOException | KeeperException e) {<a name="line.161"></a>
-<span class="sourceLineNo">162</span>      LOG.warn("Failed to reconnect to client zk after session expiration, will retry later", e);<a name="line.162"></a>
-<span class="sourceLineNo">163</span>    }<a name="line.163"></a>
-<span class="sourceLineNo">164</span>  }<a name="line.164"></a>
-<span class="sourceLineNo">165</span><a name="line.165"></a>
-<span class="sourceLineNo">166</span>  @Override<a name="line.166"></a>
-<span class="sourceLineNo">167</span>  public void nodeCreated(String path) {<a name="line.167"></a>
-<span class="sourceLineNo">168</span>    if (!validate(path)) {<a name="line.168"></a>
-<span class="sourceLineNo">169</span>      return;<a name="line.169"></a>
-<span class="sourceLineNo">170</span>    }<a name="line.170"></a>
-<span class="sourceLineNo">171</span>    try {<a name="line.171"></a>
-<span class="sourceLineNo">172</span>      byte[] data = ZKUtil.getDataAndWatch(watcher, path);<a name="line.172"></a>
-<span class="sourceLineNo">173</span>      upsertQueue(path, data);<a name="line.173"></a>
-<span class="sourceLineNo">174</span>    } catch (KeeperException e) {<a name="line.174"></a>
-<span class="sourceLineNo">175</span>      LOG.warn("Unexpected exception handling nodeCreated event", e);<a name="line.175"></a>
-<span class="sourceLineNo">176</span>    }<a name="line.176"></a>
-<span class="sourceLineNo">177</span>  }<a name="line.177"></a>
-<span class="sourceLineNo">178</span><a name="line.178"></a>
-<span class="sourceLineNo">179</span>  @Override<a name="line.179"></a>
-<span class="sourceLineNo">180</span>  public void nodeDataChanged(String path) {<a name="line.180"></a>
-<span class="sourceLineNo">181</span>    if (validate(path)) {<a name="line.181"></a>
-<span class="sourceLineNo">182</span>      nodeCreated(path);<a name="line.182"></a>
-<span class="sourceLineNo">183</span>    }<a name="line.183"></a>
-<span class="sourceLineNo">184</span>  }<a name="line.184"></a>
-<span class="sourceLineNo">185</span><a name="line.185"></a>
-<span class="sourceLineNo">186</span>  @Override<a name="line.186"></a>
-<span class="sourceLineNo">187</span>  public synchronized void nodeDeleted(String path) {<a name="line.187"></a>
-<span class="sourceLineNo">188</span>    if (validate(path)) {<a name="line.188"></a>
-<span class="sourceLineNo">189</span>      try {<a name="line.189"></a>
-<span class="sourceLineNo">190</span>        if (ZKUtil.watchAndCheckExists(watcher, path)) {<a name="line.190"></a>
-<span class="sourceLineNo">191</span>          nodeCreated(path);<a name="line.191"></a>
-<span class="sourceLineNo">192</span>        }<a name="line.192"></a>
-<span class="sourceLineNo">193</span>      } catch (KeeperException e) {<a name="line.193"></a>
-<span class="sourceLineNo">194</span>        LOG.warn("Unexpected exception handling nodeDeleted event for path: " + path, e);<a name="line.194"></a>
-<span class="sourceLineNo">195</span>      }<a name="line.195"></a>
-<span class="sourceLineNo">196</span>    }<a name="line.196"></a>
-<span class="sourceLineNo">197</span>  }<a name="line.197"></a>
-<span class="sourceLineNo">198</span><a name="line.198"></a>
-<span class="sourceLineNo">199</span>  /**<a name="line.199"></a>
-<span class="sourceLineNo">200</span>   * Validate whether a znode path is watched by us<a name="line.200"></a>
-<span class="sourceLineNo">201</span>   * @param path the path to validate<a name="line.201"></a>
-<span class="sourceLineNo">202</span>   * @return true if the znode is watched by us<a name="line.202"></a>
-<span class="sourceLineNo">203</span>   */<a name="line.203"></a>
-<span class="sourceLineNo">204</span>  abstract boolean validate(String path);<a name="line.204"></a>
-<span class="sourceLineNo">205</span><a name="line.205"></a>
-<span class="sourceLineNo">206</span>  /**<a name="line.206"></a>
-<span class="sourceLineNo">207</span>   * @return the znode(s) to watch<a name="line.207"></a>
-<span class="sourceLineNo">208</span>   */<a name="line.208"></a>
-<span class="sourceLineNo">209</span>  abstract Collection&lt;String&gt; getNodesToWatch();<a name="line.209"></a>
-<span class="sourceLineNo">210</span><a name="line.210"></a>
-<span class="sourceLineNo">211</span>  /**<a name="line.211"></a>
-<span class="sourceLineNo">212</span>   * Thread to synchronize znode data to client ZK cluster<a name="line.212"></a>
-<span class="sourceLineNo">213</span>   */<a name="line.213"></a>
-<span class="sourceLineNo">214</span>  class ClientZkUpdater extends Thread {<a name="line.214"></a>
-<span class="sourceLineNo">215</span>    final String znode;<a name="line.215"></a>
-<span class="sourceLineNo">216</span>    final BlockingQueue&lt;byte[]&gt; queue;<a name="line.216"></a>
-<span class="sourceLineNo">217</span><a name="line.217"></a>
-<span class="sourceLineNo">218</span>    public ClientZkUpdater(String znode, BlockingQueue&lt;byte[]&gt; queue) {<a name="line.218"></a>
-<span class="sourceLineNo">219</span>      this.znode = znode;<a name="line.219"></a>
-<span class="sourceLineNo">220</span>      this.queue = queue;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>      setName("ClientZKUpdater-" + znode);<a name="line.221"></a>
-<span class="sourceLineNo">222</span>    }<a name="line.222"></a>
-<span class="sourceLineNo">223</span><a name="line.223"></a>
-<span class="sourceLineNo">224</span>    @Override<a name="line.224"></a>
-<span class="sourceLineNo">225</span>    public void run() {<a name="line.225"></a>
-<span class="sourceLineNo">226</span>      while (!server.isStopped()) {<a name="line.226"></a>
-<span class="sourceLineNo">227</span>        try {<a name="line.227"></a>
-<span class="sourceLineNo">228</span>          byte[] data = queue.take();<a name="line.228"></a>
-<span class="sourceLineNo">229</span>          setDataForClientZkUntilSuccess(znode, data);<a name="line.229"></a>
-<span class="sourceLineNo">230</span>        } catch (InterruptedException e) {<a name="line.230"></a>
-<span class="sourceLineNo">231</span>          if (LOG.isDebugEnabled()) {<a name="line.231"></a>
-<span class="sourceLineNo">232</span>            LOG.debug(<a name="line.232"></a>
-<span class="sourceLineNo">233</span>              "Interrupted while checking whether need to update meta location to client zk");<a name="line.233"></a>
-<span class="sourceLineNo">234</span>          }<a name="line.234"></a>
-<span class="sourceLineNo">235</span>          Thread.currentThread().interrupt();<a name="line.235"></a>
-<span class="sourceLineNo">236</span>          break;<a name="line.236"></a>
-<span class="sourceLineNo">237</span>        }<a name="line.237"></a>
-<span class="sourceLineNo">238</span>      }<a name="line.238"></a>
-<span class="sourceLineNo">239</span>    }<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  }<a name="line.240"></a>
-<span class="sourceLineNo">241</span>}<a name="line.241"></a>
+<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.HConstants;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.Server;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.zookeeper.ZKListener;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.zookeeper.CreateMode;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.zookeeper.KeeperException;<a name="line.36"></a>
+<span class="sourceLineNo">037</span><a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.slf4j.Logger;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.slf4j.LoggerFactory;<a name="line.39"></a>
+<span class="sourceLineNo">040</span><a name="line.40"></a>
+<span class="sourceLineNo">041</span>/**<a name="line.41"></a>
+<span class="sourceLineNo">042</span> * Tracks the target znode(s) on server ZK cluster and synchronize them to client ZK cluster if<a name="line.42"></a>
+<span class="sourceLineNo">043</span> * changed<a name="line.43"></a>
+<span class="sourceLineNo">044</span> * &lt;p/&gt;<a name="line.44"></a>
+<span class="sourceLineNo">045</span> * The target znode(s) is given through {@link #getNodesToWatch()} method<a name="line.45"></a>
+<span class="sourceLineNo">046</span> */<a name="line.46"></a>
+<span class="sourceLineNo">047</span>@InterfaceAudience.Private<a name="line.47"></a>
+<span class="sourceLineNo">048</span>public abstract class ClientZKSyncer extends ZKListener {<a name="line.48"></a>
+<span class="sourceLineNo">049</span>  private static final Logger LOG = LoggerFactory.getLogger(ClientZKSyncer.class);<a name="line.49"></a>
+<span class="sourceLineNo">050</span>  private final Server server;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>  private final ZKWatcher clientZkWatcher;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>  // We use queues and daemon threads to synchronize the data to client ZK cluster<a name="line.52"></a>
+<span class="sourceLineNo">053</span>  // to avoid blocking the single event thread for watchers<a name="line.53"></a>
+<span class="sourceLineNo">054</span>  private final Map&lt;String, BlockingQueue&lt;byte[]&gt;&gt; queues;<a name="line.54"></a>
+<span class="sourceLineNo">055</span><a name="line.55"></a>
+<span class="sourceLineNo">056</span>  public ClientZKSyncer(ZKWatcher watcher, ZKWatcher clientZkWatcher, Server server) {<a name="line.56"></a>
+<span class="sourceLineNo">057</span>    super(watcher);<a name="line.57"></a>
+<span class="sourceLineNo">058</span>    this.server = server;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>    this.clientZkWatcher = clientZkWatcher;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>    this.queues = new HashMap&lt;&gt;();<a name="line.60"></a>
+<span class="sourceLineNo">061</span>  }<a name="line.61"></a>
+<span class="sourceLineNo">062</span><a name="line.62"></a>
+<span class="sourceLineNo">063</span>  /**<a name="line.63"></a>
+<span class="sourceLineNo">064</span>   * Starts the syncer<a name="line.64"></a>
+<span class="sourceLineNo">065</span>   * @throws KeeperException if error occurs when trying to create base nodes on client ZK<a name="line.65"></a>
+<span class="sourceLineNo">066</span>   */<a name="line.66"></a>
+<span class="sourceLineNo">067</span>  public void start() throws KeeperException {<a name="line.67"></a>
+<span class="sourceLineNo">068</span>    LOG.debug("Starting " + getClass().getSimpleName());<a name="line.68"></a>
+<span class="sourceLineNo">069</span>    this.watcher.registerListener(this);<a name="line.69"></a>
+<span class="sourceLineNo">070</span>    // create base znode on remote ZK<a name="line.70"></a>
+<span class="sourceLineNo">071</span>    ZKUtil.createWithParents(clientZkWatcher, watcher.znodePaths.baseZNode);<a name="line.71"></a>
+<span class="sourceLineNo">072</span>    // set meta znodes for client ZK<a name="line.72"></a>
+<span class="sourceLineNo">073</span>    Collection&lt;String&gt; nodes = getNodesToWatch();<a name="line.73"></a>
+<span class="sourceLineNo">074</span>    LOG.debug("Znodes to watch: " + nodes);<a name="line.74"></a>
+<span class="sourceLineNo">075</span>    // initialize queues and threads<a name="line.75"></a>
+<span class="sourceLineNo">076</span>    for (String node : nodes) {<a name="line.76"></a>
+<span class="sourceLineNo">077</span>      BlockingQueue&lt;byte[]&gt; queue = new ArrayBlockingQueue&lt;&gt;(1);<a name="line.77"></a>
+<span class="sourceLineNo">078</span>      queues.put(node, queue);<a name="line.78"></a>
+<span class="sourceLineNo">079</span>      Thread updater = new ClientZkUpdater(node, queue);<a name="line.79"></a>
+<span class="sourceLineNo">080</span>      updater.setDaemon(true);<a name="line.80"></a>
+<span class="sourceLineNo">081</span>      updater.start();<a name="line.81"></a>
+<span class="sourceLineNo">082</span>      watchAndCheckExists(node);<a name="line.82"></a>
+<span class="sourceLineNo">083</span>    }<a name="line.83"></a>
+<span class="sourceLineNo">084</span>  }<a name="line.84"></a>
+<span class="sourceLineNo">085</span><a name="line.85"></a>
+<span class="sourceLineNo">086</span>  private void watchAndCheckExists(String node) {<a name="line.86"></a>
+<span class="sourceLineNo">087</span>    try {<a name="line.87"></a>
+<span class="sourceLineNo">088</span>      if (ZKUtil.watchAndCheckExists(watcher, node)) {<a name="line.88"></a>
+<span class="sourceLineNo">089</span>        byte[] data = ZKUtil.getDataAndWatch(watcher, node);<a name="line.89"></a>
+<span class="sourceLineNo">090</span>        if (data != null) {<a name="line.90"></a>
+<span class="sourceLineNo">091</span>          // put the data into queue<a name="line.91"></a>
+<span class="sourceLineNo">092</span>          upsertQueue(node, data);<a name="line.92"></a>
+<span class="sourceLineNo">093</span>        } else {<a name="line.93"></a>
+<span class="sourceLineNo">094</span>          // It existed but now does not, should has been tracked by our watcher, ignore<a name="line.94"></a>
+<span class="sourceLineNo">095</span>          LOG.debug("Found no data from " + node);<a name="line.95"></a>
+<span class="sourceLineNo">096</span>          watchAndCheckExists(node);<a name="line.96"></a>
+<span class="sourceLineNo">097</span>        }<a name="line.97"></a>
+<span class="sourceLineNo">098</span>      } else {<a name="line.98"></a>
+<span class="sourceLineNo">099</span>        // cleanup stale ZNodes on client ZK to avoid invalid requests to server<a name="line.99"></a>
+<span class="sourceLineNo">100</span>        ZKUtil.deleteNodeFailSilent(clientZkWatcher, node);<a name="line.100"></a>
+<span class="sourceLineNo">101</span>      }<a name="line.101"></a>
+<span class="sourceLineNo">102</span>    } catch (KeeperException e) {<a name="line.102"></a>
+<span class="sourceLineNo">103</span>      server.abort("Unexpected exception during initialization, aborting", e);<a name="line.103"></a>
+<span class="sourceLineNo">104</span>    }<a name="line.104"></a>
+<span class="sourceLineNo">105</span>  }<a name="line.105"></a>
+<span class="sourceLineNo">106</span><a name="line.106"></a>
+<span class="sourceLineNo">107</span>  /**<a name="line.107"></a>
+<span class="sourceLineNo">108</span>   * Update the value of the single element in queue if any, or else insert.<a name="line.108"></a>
+<span class="sourceLineNo">109</span>   * &lt;p/&gt;<a name="line.109"></a>
+<span class="sourceLineNo">110</span>   * We only need to synchronize the latest znode value to client ZK rather than synchronize each<a name="line.110"></a>
+<span class="sourceLineNo">111</span>   * time<a name="line.111"></a>
+<span class="sourceLineNo">112</span>   * @param data the data to write to queue<a name="line.112"></a>
+<span class="sourceLineNo">113</span>   */<a name="line.113"></a>
+<span class="sourceLineNo">114</span>  private void upsertQueue(String node, byte[] data) {<a name="line.114"></a>
+<span class="sourceLineNo">115</span>    BlockingQueue&lt;byte[]&gt; queue = queues.get(node);<a name="line.115"></a>
+<span class="sourceLineNo">116</span>    synchronized (queue) {<a name="line.116"></a>
+<span class="sourceLineNo">117</span>      queue.poll();<a name="line.117"></a>
+<span class="sourceLineNo">118</span>      queue.offer(data);<a name="line.118"></a>
+<span class="sourceLineNo">119</span>    }<a name="line.119"></a>
+<span class="sourceLineNo">120</span>  }<a name="line.120"></a>
+<span class="sourceLineNo">121</span><a name="line.121"></a>
+<span class="sourceLineNo">122</span>  /**<a name="line.122"></a>
+<span class="sourceLineNo">123</span>   * Set data for client ZK and retry until succeed. Be very careful to prevent dead loop when<a name="line.123"></a>
+<span class="sourceLineNo">124</span>   * modifying this method<a name="line.124"></a>
+<span class="sourceLineNo">125</span>   * @param node the znode to set on client ZK<a name="line.125"></a>
+<span class="sourceLineNo">126</span>   * @param data the data to set to client ZK<a name="line.126"></a>
+<span class="sourceLineNo">127</span>   * @throws InterruptedException if the thread is interrupted during process<a name="line.127"></a>
+<span class="sourceLineNo">128</span>   */<a name="line.128"></a>
+<span class="sourceLineNo">129</span>  private final void setDataForClientZkUntilSuccess(String node, byte[] data)<a name="line.129"></a>
+<span class="sourceLineNo">130</span>      throws InterruptedException {<a name="line.130"></a>
+<span class="sourceLineNo">131</span>    while (!server.isStopped()) {<a name="line.131"></a>
+<span class="sourceLineNo">132</span>      try {<a name="line.132"></a>
+<span class="sourceLineNo">133</span>        LOG.debug("Set data for remote " + node + ", client zk wather: " + clientZkWatcher);<a name="line.133"></a>
+<span class="sourceLineNo">134</span>        ZKUtil.setData(clientZkWatcher, node, data);<a name="line.134"></a>
+<span class="sourceLineNo">135</span>        break;<a name="line.135"></a>
+<span class="sourceLineNo">136</span>      } catch (KeeperException.NoNodeException nne) {<a name="line.136"></a>
+<span class="sourceLineNo">137</span>        // Node doesn't exist, create it and set value<a name="line.137"></a>
+<span class="sourceLineNo">138</span>        try {<a name="line.138"></a>
+<span class="sourceLineNo">139</span>          ZKUtil.createNodeIfNotExistsNoWatch(clientZkWatcher, node, data, CreateMode.PERSISTENT);<a name="line.139"></a>
+<span class="sourceLineNo">140</span>          break;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>        } catch (KeeperException.ConnectionLossException<a name="line.141"></a>
+<span class="sourceLineNo">142</span>            | KeeperException.SessionExpiredException ee) {<a name="line.142"></a>
+<span class="sourceLineNo">143</span>          reconnectAfterExpiration();<a name="line.143"></a>
+<span class="sourceLineNo">144</span>        } catch (KeeperException e) {<a name="line.144"></a>
+<span class="sourceLineNo">145</span>          LOG.warn(<a name="line.145"></a>
+<span class="sourceLineNo">146</span>            "Failed to create znode " + node + " due to: " + e.getMessage() + ", will retry later");<a name="line.146"></a>
+<span class="sourceLineNo">147</span>        }<a name="line.147"></a>
+<span class="sourceLineNo">148</span>      } catch (KeeperException.ConnectionLossException<a name="line.148"></a>
+<span class="sourceLineNo">149</span>          | KeeperException.SessionExpiredException ee) {<a name="line.149"></a>
+<span class="sourceLineNo">150</span>        reconnectAfterExpiration();<a name="line.150"></a>
+<span class="sourceLineNo">151</span>      } catch (KeeperException e) {<a name="line.151"></a>
+<span class="sourceLineNo">152</span>        LOG.debug("Failed to set data to client ZK, will retry later", e);<a name="line.152"></a>
+<span class="sourceLineNo">153</span>      }<a name="line.153"></a>
+<span class="sourceLineNo">154</span>      Threads.sleep(HConstants.SOCKET_RETRY_WAIT_MS);<a name="line.154"></a>
+<span class="sourceLineNo">155</span>    }<a name="line.155"></a>
+<span class="sourceLineNo">156</span>  }<a name="line.156"></a>
+<span class="sourceLineNo">157</span><a name="line.157"></a>
+<span class="sourceLineNo">158</span>  private final void reconnectAfterExpiration() throws InterruptedException {<a name="line.158"></a>
+<span class="sourceLineNo">159</span>    LOG.warn("ZK session expired or lost. Retry a new connection...");<a name="line.159"></a>
+<span class="sourceLineNo">160</span>    try {<a name="line.160"></a>
+<span class="sourceLineNo">161</span>      clientZkWatcher.reconnectAfterExpiration();<a name="line.161"></a>
+<span class="sourceLineNo">162</span>    } catch (IOException | KeeperException e) {<a name="line.162"></a>
+<span class="sourceLineNo">163</span>      LOG.warn("Failed to reconnect to client zk after session expiration, will retry later", e);<a name="line.163"></a>
+<span class="sourceLineNo">164</span>    }<a name="line.164"></a>
+<span class="sourceLineNo">165</span>  }<a name="line.165"></a>
+<span class="sourceLineNo">166</span><a name="line.166"></a>
+<span class="sourceLineNo">167</span>  @Override<a name="line.167"></a>
+<span class="sourceLineNo">168</span>  public void nodeCreated(String path) {<a name="line.168"></a>
+<span class="sourceLineNo">169</span>    if (!validate(path)) {<a name="line.169"></a>
+<span class="sourceLineNo">170</span>      return;<a name="line.170"></a>
+<span class="sourceLineNo">171</span>    }<a name="line.171"></a>
+<span class="sourceLineNo">172</span>    try {<a name="line.172"></a>
+<span class="sourceLineNo">173</span>      byte[] data = ZKUtil.getDataAndWatch(watcher, path);<a name="line.173"></a>
+<span class="sourceLineNo">174</span>      upsertQueue(path, data);<a name="line.174"></a>
+<span class="sourceLineNo">175</span>    } catch (KeeperException e) {<a name="line.175"></a>
+<span class="sourceLineNo">176</span>      LOG.warn("Unexpected exception handling nodeCreated event", e);<a name="line.176"></a>
+<span class="sourceLineNo">177</span>    }<a name="line.177"></a>
+<span class="sourceLineNo">178</span>  }<a name="line.178"></a>
+<span class="sourceLineNo">179</span><a name="line.179"></a>
+<span class="sourceLineNo">180</span>  @Override<a name="line.180"></a>
+<span class="sourceLineNo">181</span>  public void nodeDataChanged(String path) {<a name="line.181"></a>
+<span class="sourceLineNo">182</span>    if (validate(path)) {<a name="line.182"></a>
+<span class="sourceLineNo">183</span>      nodeCreated(path);<a name="line.183"></a>
+<span class="sourceLineNo">184</span>    }<a name="line.184"></a>
+<span class="sourceLineNo">185</span>  }<a name="line.185"></a>
+<span class="sourceLineNo">186</span><a name="line.186"></a>
+<span class="sourceLineNo">187</span>  @Override<a name="line.187"></a>
+<span class="sourceLineNo">188</span>  public synchronized void nodeDeleted(String path) {<a name="line.188"></a>
+<span class="sourceLineNo">189</span>    if (validate(path)) {<a name="line.189"></a>
+<span class="sourceLineNo">190</span>      try {<a name="line.190"></a>
+<span class="sourceLineNo">191</span>        if (ZKUtil.watchAndCheckExists(watcher, path)) {<a name="line.191"></a>
+<span class="sourceLineNo">192</span>          nodeCreated(path);<a name="line.192"></a>
+<span class="sourceLineNo">193</span>        }<a name="line.193"></a>
+<span class="sourceLineNo">194</span>      } catch (KeeperException e) {<a name="line.194"></a>
+<span class="sourceLineNo">195</span>        LOG.warn("Unexpected exception handling nodeDeleted event for path: " + path, e);<a name="line.195"></a>
+<span class="sourceLineNo">196</span>      }<a name="line.196"></a>
+<span class="sourceLineNo">197</span>    }<a name="line.197"></a>
+<span class="sourceLineNo">198</span>  }<a name="line.198"></a>
+<span class="sourceLineNo">199</span><a name="line.199"></a>
+<span class="sourceLineNo">200</span>  /**<a name="line.200"></a>
+<span class="sourceLineNo">201</span>   * Validate whether a znode path is watched by us<a name="line.201"></a>
+<span class="sourceLineNo">202</span>   * @param path the path to validate<a name="line.202"></a>
+<span class="sourceLineNo">203</span>   * @return true if the znode is watched by us<a name="line.203"></a>
+<span class="sourceLineNo">204</span>   */<a name="line.204"></a>
+<span class="sourceLineNo">205</span>  abstract boolean validate(String path);<a name="line.205"></a>
+<span class="sourceLineNo">206</span><a name="line.206"></a>
+<span class="sourceLineNo">207</span>  /**<a name="line.207"></a>
+<span class="sourceLineNo">208</span>   * @return the znode(s) to watch<a name="line.208"></a>
+<span class="sourceLineNo">209</span>   */<a name="line.209"></a>
+<span class="sourceLineNo">210</span>  abstract Collection&lt;String&gt; getNodesToWatch();<a name="line.210"></a>
+<span class="sourceLineNo">211</span><a name="line.211"></a>
+<span class="sourceLineNo">212</span>  /**<a name="line.212"></a>
+<span class="sourceLineNo">213</span>   * Thread to synchronize znode data to client ZK cluster<a name="line.213"></a>
+<span class="sourceLineNo">214</span>   */<a name="line.214"></a>
+<span class="sourceLineNo">215</span>  class ClientZkUpdater extends Thread {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>    final String znode;<a name="line.216"></a>
+<span class="sourceLineNo">217</span>    final BlockingQueue&lt;byte[]&gt; queue;<a name="line.217"></a>
+<span class="sourceLineNo">218</span><a name="line.218"></a>
+<span class="sourceLineNo">219</span>    public ClientZkUpdater(String znode, BlockingQueue&lt;byte[]&gt; queue) {<a name="line.219"></a>
+<span class="sourceLineNo">220</span>      this.znode = znode;<a name="line.220"></a>
+<span class="sourceLineNo">221</span>      this.queue = queue;<a name="line.221"></a>
+<span class="sourceLineNo">222</span>      setName("ClientZKUpdater-" + znode);<a name="line.222"></a>
+<span class="sourceLineNo">223</span>    }<a name="line.223"></a>
+<span class="sourceLineNo">224</span><a name="line.224"></a>
+<span class="sourceLineNo">225</span>    @Override<a name="line.225"></a>
+<span class="sourceLineNo">226</span>    public void run() {<a name="line.226"></a>
+<span class="sourceLineNo">227</span>      while (!server.isStopped()) {<a name="line.227"></a>
+<span class="sourceLineNo">228</span>        try {<a name="line.228"></a>
+<span class="sourceLineNo">229</span>          byte[] data = queue.take();<a name="line.229"></a>
+<span class="sourceLineNo">230</span>          setDataForClientZkUntilSuccess(znode, data);<a name="line.230"></a>
+<span class="sourceLineNo">231</span>        } catch (InterruptedException e) {<a name="line.231"></a>
+<span class="sourceLineNo">232</span>          if (LOG.isDebugEnabled()) {<a name="line.232"></a>
+<span class="sourceLineNo">233</span>            LOG.debug(<a name="line.233"></a>
+<span class="sourceLineNo">234</span>              "Interrupted while checking whether need to update meta location to client zk");<a name="line.234"></a>
+<span class="sourceLineNo">235</span>          }<a name="line.235"></a>
+<span class="sourceLineNo">236</span>          Thread.currentThread().interrupt();<a name="line.236"></a>
+<span class="sourceLineNo">237</span>          break;<a name="line.237"></a>
+<span class="sourceLineNo">238</span>        }<a name="line.238"></a>
+<span class="sourceLineNo">239</span>      }<a name="line.239"></a>
+<span class="sourceLineNo">240</span>    }<a name="line.240"></a>
+<span class="sourceLineNo">241</span>  }<a name="line.241"></a>
+<span class="sourceLineNo">242</span>}<a name="line.242"></a>
 
 
 

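Concrete syncers subclass ClientZKSyncer and supply the two abstract hooks:
validate(path) filters watcher callbacks down to the znodes being mirrored,
and getNodesToWatch() enumerates them. A hypothetical single-node subclass is
sketched below, assuming it lives in the same package as ClientZKSyncer (the
abstract methods are package-private); ExampleNodeSyncer and its "/example"
znode are invented for illustration and are not the actual HBase
implementations:

  package org.apache.hadoop.hbase.master.zksyncer;

  import java.util.Collection;
  import java.util.Collections;

  import org.apache.hadoop.hbase.Server;
  import org.apache.hadoop.hbase.zookeeper.ZKWatcher;

  public class ExampleNodeSyncer extends ClientZKSyncer {
    private final String targetZNode;

    public ExampleNodeSyncer(ZKWatcher watcher, ZKWatcher clientZkWatcher, Server server) {
      super(watcher, clientZkWatcher, server);
      // Mirror a single znode under the cluster's base znode (invented path).
      this.targetZNode = watcher.znodePaths.baseZNode + "/example";
    }

    @Override
    boolean validate(String path) {
      // Only react to events for the znode this syncer mirrors.
      return targetZNode.equals(path);
    }

    @Override
    Collection<String> getNodesToWatch() {
      return Collections.singletonList(targetZNode);
    }
  }

Calling start() on such a syncer registers it as a listener on the
server-side watcher, creates the base znode on the client ZK quorum, and
spawns one daemon ClientZkUpdater per watched znode.
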
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html
index 3e25d25..87545d3 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html
@@ -33,220 +33,221 @@
 <span class="sourceLineNo">025</span>import java.util.concurrent.ArrayBlockingQueue;<a name="line.25"></a>
 <span class="sourceLineNo">026</span>import java.util.concurrent.BlockingQueue;<a name="line.26"></a>
 <span class="sourceLineNo">027</span><a name="line.27"></a>
-<span class="sourceLineNo">028</span>import org.apache.commons.logging.Log;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.commons.logging.LogFactory;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.HConstants;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.Server;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.zookeeper.ZKListener;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.zookeeper.CreateMode;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.zookeeper.KeeperException;<a name="line.38"></a>
-<span class="sourceLineNo">039</span><a name="line.39"></a>
-<span class="sourceLineNo">040</span>/**<a name="line.40"></a>
-<span class="sourceLineNo">041</span> * Tracks the target znode(s) on server ZK cluster and synchronize them to client ZK cluster if<a name="line.41"></a>
-<span class="sourceLineNo">042</span> * changed<a name="line.42"></a>
-<span class="sourceLineNo">043</span> * &lt;p/&gt;<a name="line.43"></a>
-<span class="sourceLineNo">044</span> * The target znode(s) is given through {@link #getNodesToWatch()} method<a name="line.44"></a>
-<span class="sourceLineNo">045</span> */<a name="line.45"></a>
-<span class="sourceLineNo">046</span>@InterfaceAudience.Private<a name="line.46"></a>
-<span class="sourceLineNo">047</span>public abstract class ClientZKSyncer extends ZKListener {<a name="line.47"></a>
-<span class="sourceLineNo">048</span>  private static final Log LOG = LogFactory.getLog(ClientZKSyncer.class);<a name="line.48"></a>
-<span class="sourceLineNo">049</span>  private final Server server;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>  private final ZKWatcher clientZkWatcher;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>  // We use queues and daemon threads to synchronize the data to client ZK cluster<a name="line.51"></a>
-<span class="sourceLineNo">052</span>  // to avoid blocking the single event thread for watchers<a name="line.52"></a>
-<span class="sourceLineNo">053</span>  private final Map&lt;String, BlockingQueue&lt;byte[]&gt;&gt; queues;<a name="line.53"></a>
-<span class="sourceLineNo">054</span><a name="line.54"></a>
-<span class="sourceLineNo">055</span>  public ClientZKSyncer(ZKWatcher watcher, ZKWatcher clientZkWatcher, Server server) {<a name="line.55"></a>
-<span class="sourceLineNo">056</span>    super(watcher);<a name="line.56"></a>
-<span class="sourceLineNo">057</span>    this.server = server;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>    this.clientZkWatcher = clientZkWatcher;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>    this.queues = new HashMap&lt;&gt;();<a name="line.59"></a>
-<span class="sourceLineNo">060</span>  }<a name="line.60"></a>
-<span class="sourceLineNo">061</span><a name="line.61"></a>
-<span class="sourceLineNo">062</span>  /**<a name="line.62"></a>
-<span class="sourceLineNo">063</span>   * Starts the syncer<a name="line.63"></a>
-<span class="sourceLineNo">064</span>   * @throws KeeperException if error occurs when trying to create base nodes on client ZK<a name="line.64"></a>
-<span class="sourceLineNo">065</span>   */<a name="line.65"></a>
-<span class="sourceLineNo">066</span>  public void start() throws KeeperException {<a name="line.66"></a>
-<span class="sourceLineNo">067</span>    LOG.debug("Starting " + getClass().getSimpleName());<a name="line.67"></a>
-<span class="sourceLineNo">068</span>    this.watcher.registerListener(this);<a name="line.68"></a>
-<span class="sourceLineNo">069</span>    // create base znode on remote ZK<a name="line.69"></a>
-<span class="sourceLineNo">070</span>    ZKUtil.createWithParents(clientZkWatcher, watcher.znodePaths.baseZNode);<a name="line.70"></a>
-<span class="sourceLineNo">071</span>    // set meta znodes for client ZK<a name="line.71"></a>
-<span class="sourceLineNo">072</span>    Collection&lt;String&gt; nodes = getNodesToWatch();<a name="line.72"></a>
-<span class="sourceLineNo">073</span>    LOG.debug("Znodes to watch: " + nodes);<a name="line.73"></a>
-<span class="sourceLineNo">074</span>    // initialize queues and threads<a name="line.74"></a>
-<span class="sourceLineNo">075</span>    for (String node : nodes) {<a name="line.75"></a>
-<span class="sourceLineNo">076</span>      BlockingQueue&lt;byte[]&gt; queue = new ArrayBlockingQueue&lt;&gt;(1);<a name="line.76"></a>
-<span class="sourceLineNo">077</span>      queues.put(node, queue);<a name="line.77"></a>
-<span class="sourceLineNo">078</span>      Thread updater = new ClientZkUpdater(node, queue);<a name="line.78"></a>
-<span class="sourceLineNo">079</span>      updater.setDaemon(true);<a name="line.79"></a>
-<span class="sourceLineNo">080</span>      updater.start();<a name="line.80"></a>
-<span class="sourceLineNo">081</span>      watchAndCheckExists(node);<a name="line.81"></a>
-<span class="sourceLineNo">082</span>    }<a name="line.82"></a>
-<span class="sourceLineNo">083</span>  }<a name="line.83"></a>
-<span class="sourceLineNo">084</span><a name="line.84"></a>
-<span class="sourceLineNo">085</span>  private void watchAndCheckExists(String node) {<a name="line.85"></a>
-<span class="sourceLineNo">086</span>    try {<a name="line.86"></a>
-<span class="sourceLineNo">087</span>      if (ZKUtil.watchAndCheckExists(watcher, node)) {<a name="line.87"></a>
-<span class="sourceLineNo">088</span>        byte[] data = ZKUtil.getDataAndWatch(watcher, node);<a name="line.88"></a>
-<span class="sourceLineNo">089</span>        if (data != null) {<a name="line.89"></a>
-<span class="sourceLineNo">090</span>          // put the data into queue<a name="line.90"></a>
-<span class="sourceLineNo">091</span>          upsertQueue(node, data);<a name="line.91"></a>
-<span class="sourceLineNo">092</span>        } else {<a name="line.92"></a>
-<span class="sourceLineNo">093</span>          // It existed but now does not, should has been tracked by our watcher, ignore<a name="line.93"></a>
-<span class="sourceLineNo">094</span>          LOG.debug("Found no data from " + node);<a name="line.94"></a>
-<span class="sourceLineNo">095</span>          watchAndCheckExists(node);<a name="line.95"></a>
-<span class="sourceLineNo">096</span>        }<a name="line.96"></a>
-<span class="sourceLineNo">097</span>      } else {<a name="line.97"></a>
-<span class="sourceLineNo">098</span>        // cleanup stale ZNodes on client ZK to avoid invalid requests to server<a name="line.98"></a>
-<span class="sourceLineNo">099</span>        ZKUtil.deleteNodeFailSilent(clientZkWatcher, node);<a name="line.99"></a>
-<span class="sourceLineNo">100</span>      }<a name="line.100"></a>
-<span class="sourceLineNo">101</span>    } catch (KeeperException e) {<a name="line.101"></a>
-<span class="sourceLineNo">102</span>      server.abort("Unexpected exception during initialization, aborting", e);<a name="line.102"></a>
-<span class="sourceLineNo">103</span>    }<a name="line.103"></a>
-<span class="sourceLineNo">104</span>  }<a name="line.104"></a>
-<span class="sourceLineNo">105</span><a name="line.105"></a>
-<span class="sourceLineNo">106</span>  /**<a name="line.106"></a>
-<span class="sourceLineNo">107</span>   * Update the value of the single element in queue if any, or else insert.<a name="line.107"></a>
-<span class="sourceLineNo">108</span>   * &lt;p/&gt;<a name="line.108"></a>
-<span class="sourceLineNo">109</span>   * We only need to synchronize the latest znode value to client ZK rather than synchronize each<a name="line.109"></a>
-<span class="sourceLineNo">110</span>   * time<a name="line.110"></a>
-<span class="sourceLineNo">111</span>   * @param data the data to write to queue<a name="line.111"></a>
-<span class="sourceLineNo">112</span>   */<a name="line.112"></a>
-<span class="sourceLineNo">113</span>  private void upsertQueue(String node, byte[] data) {<a name="line.113"></a>
-<span class="sourceLineNo">114</span>    BlockingQueue&lt;byte[]&gt; queue = queues.get(node);<a name="line.114"></a>
-<span class="sourceLineNo">115</span>    synchronized (queue) {<a name="line.115"></a>
-<span class="sourceLineNo">116</span>      queue.poll();<a name="line.116"></a>
-<span class="sourceLineNo">117</span>      queue.offer(data);<a name="line.117"></a>
-<span class="sourceLineNo">118</span>    }<a name="line.118"></a>
-<span class="sourceLineNo">119</span>  }<a name="line.119"></a>
-<span class="sourceLineNo">120</span><a name="line.120"></a>
-<span class="sourceLineNo">121</span>  /**<a name="line.121"></a>
-<span class="sourceLineNo">122</span>   * Set data for client ZK and retry until succeed. Be very careful to prevent dead loop when<a name="line.122"></a>
-<span class="sourceLineNo">123</span>   * modifying this method<a name="line.123"></a>
-<span class="sourceLineNo">124</span>   * @param node the znode to set on client ZK<a name="line.124"></a>
-<span class="sourceLineNo">125</span>   * @param data the data to set to client ZK<a name="line.125"></a>
-<span class="sourceLineNo">126</span>   * @throws InterruptedException if the thread is interrupted during process<a name="line.126"></a>
-<span class="sourceLineNo">127</span>   */<a name="line.127"></a>
-<span class="sourceLineNo">128</span>  private final void setDataForClientZkUntilSuccess(String node, byte[] data)<a name="line.128"></a>
-<span class="sourceLineNo">129</span>      throws InterruptedException {<a name="line.129"></a>
-<span class="sourceLineNo">130</span>    while (!server.isStopped()) {<a name="line.130"></a>
-<span class="sourceLineNo">131</span>      try {<a name="line.131"></a>
-<span class="sourceLineNo">132</span>        LOG.debug("Set data for remote " + node + ", client zk wather: " + clientZkWatcher);<a name="line.132"></a>
-<span class="sourceLineNo">133</span>        ZKUtil.setData(clientZkWatcher, node, data);<a name="line.133"></a>
-<span class="sourceLineNo">134</span>        break;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>      } catch (KeeperException.NoNodeException nne) {<a name="line.135"></a>
-<span class="sourceLineNo">136</span>        // Node doesn't exist, create it and set value<a name="line.136"></a>
-<span class="sourceLineNo">137</span>        try {<a name="line.137"></a>
-<span class="sourceLineNo">138</span>          ZKUtil.createNodeIfNotExistsNoWatch(clientZkWatcher, node, data, CreateMode.PERSISTENT);<a name="line.138"></a>
-<span class="sourceLineNo">139</span>          break;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>        } catch (KeeperException.ConnectionLossException<a name="line.140"></a>
-<span class="sourceLineNo">141</span>            | KeeperException.SessionExpiredException ee) {<a name="line.141"></a>
-<span class="sourceLineNo">142</span>          reconnectAfterExpiration();<a name="line.142"></a>
-<span class="sourceLineNo">143</span>        } catch (KeeperException e) {<a name="line.143"></a>
-<span class="sourceLineNo">144</span>          LOG.warn(<a name="line.144"></a>
-<span class="sourceLineNo">145</span>            "Failed to create znode " + node + " due to: " + e.getMessage() + ", will retry later");<a name="line.145"></a>
-<span class="sourceLineNo">146</span>        }<a name="line.146"></a>
-<span class="sourceLineNo">147</span>      } catch (KeeperException.ConnectionLossException<a name="line.147"></a>
-<span class="sourceLineNo">148</span>          | KeeperException.SessionExpiredException ee) {<a name="line.148"></a>
-<span class="sourceLineNo">149</span>        reconnectAfterExpiration();<a name="line.149"></a>
-<span class="sourceLineNo">150</span>      } catch (KeeperException e) {<a name="line.150"></a>
-<span class="sourceLineNo">151</span>        LOG.debug("Failed to set data to client ZK, will retry later", e);<a name="line.151"></a>
-<span class="sourceLineNo">152</span>      }<a name="line.152"></a>
-<span class="sourceLineNo">153</span>      Threads.sleep(HConstants.SOCKET_RETRY_WAIT_MS);<a name="line.153"></a>
-<span class="sourceLineNo">154</span>    }<a name="line.154"></a>
-<span class="sourceLineNo">155</span>  }<a name="line.155"></a>
-<span class="sourceLineNo">156</span><a name="line.156"></a>
-<span class="sourceLineNo">157</span>  private final void reconnectAfterExpiration() throws InterruptedException {<a name="line.157"></a>
-<span class="sourceLineNo">158</span>    LOG.warn("ZK session expired or lost. Retry a new connection...");<a name="line.158"></a>
-<span class="sourceLineNo">159</span>    try {<a name="line.159"></a>
-<span class="sourceLineNo">160</span>      clientZkWatcher.reconnectAfterExpiration();<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    } catch (IOException | KeeperException e) {<a name="line.161"></a>
-<span class="sourceLineNo">162</span>      LOG.warn("Failed to reconnect to client zk after session expiration, will retry later", e);<a name="line.162"></a>
-<span class="sourceLineNo">163</span>    }<a name="line.163"></a>
-<span class="sourceLineNo">164</span>  }<a name="line.164"></a>
-<span class="sourceLineNo">165</span><a name="line.165"></a>
-<span class="sourceLineNo">166</span>  @Override<a name="line.166"></a>
-<span class="sourceLineNo">167</span>  public void nodeCreated(String path) {<a name="line.167"></a>
-<span class="sourceLineNo">168</span>    if (!validate(path)) {<a name="line.168"></a>
-<span class="sourceLineNo">169</span>      return;<a name="line.169"></a>
-<span class="sourceLineNo">170</span>    }<a name="line.170"></a>
-<span class="sourceLineNo">171</span>    try {<a name="line.171"></a>
-<span class="sourceLineNo">172</span>      byte[] data = ZKUtil.getDataAndWatch(watcher, path);<a name="line.172"></a>
-<span class="sourceLineNo">173</span>      upsertQueue(path, data);<a name="line.173"></a>
-<span class="sourceLineNo">174</span>    } catch (KeeperException e) {<a name="line.174"></a>
-<span class="sourceLineNo">175</span>      LOG.warn("Unexpected exception handling nodeCreated event", e);<a name="line.175"></a>
-<span class="sourceLineNo">176</span>    }<a name="line.176"></a>
-<span class="sourceLineNo">177</span>  }<a name="line.177"></a>
-<span class="sourceLineNo">178</span><a name="line.178"></a>
-<span class="sourceLineNo">179</span>  @Override<a name="line.179"></a>
-<span class="sourceLineNo">180</span>  public void nodeDataChanged(String path) {<a name="line.180"></a>
-<span class="sourceLineNo">181</span>    if (validate(path)) {<a name="line.181"></a>
-<span class="sourceLineNo">182</span>      nodeCreated(path);<a name="line.182"></a>
-<span class="sourceLineNo">183</span>    }<a name="line.183"></a>
-<span class="sourceLineNo">184</span>  }<a name="line.184"></a>
-<span class="sourceLineNo">185</span><a name="line.185"></a>
-<span class="sourceLineNo">186</span>  @Override<a name="line.186"></a>
-<span class="sourceLineNo">187</span>  public synchronized void nodeDeleted(String path) {<a name="line.187"></a>
-<span class="sourceLineNo">188</span>    if (validate(path)) {<a name="line.188"></a>
-<span class="sourceLineNo">189</span>      try {<a name="line.189"></a>
-<span class="sourceLineNo">190</span>        if (ZKUtil.watchAndCheckExists(watcher, path)) {<a name="line.190"></a>
-<span class="sourceLineNo">191</span>          nodeCreated(path);<a name="line.191"></a>
-<span class="sourceLineNo">192</span>        }<a name="line.192"></a>
-<span class="sourceLineNo">193</span>      } catch (KeeperException e) {<a name="line.193"></a>
-<span class="sourceLineNo">194</span>        LOG.warn("Unexpected exception handling nodeDeleted event for path: " + path, e);<a name="line.194"></a>
-<span class="sourceLineNo">195</span>      }<a name="line.195"></a>
-<span class="sourceLineNo">196</span>    }<a name="line.196"></a>
-<span class="sourceLineNo">197</span>  }<a name="line.197"></a>
-<span class="sourceLineNo">198</span><a name="line.198"></a>
-<span class="sourceLineNo">199</span>  /**<a name="line.199"></a>
-<span class="sourceLineNo">200</span>   * Validate whether a znode path is watched by us<a name="line.200"></a>
-<span class="sourceLineNo">201</span>   * @param path the path to validate<a name="line.201"></a>
-<span class="sourceLineNo">202</span>   * @return true if the znode is watched by us<a name="line.202"></a>
-<span class="sourceLineNo">203</span>   */<a name="line.203"></a>
-<span class="sourceLineNo">204</span>  abstract boolean validate(String path);<a name="line.204"></a>
-<span class="sourceLineNo">205</span><a name="line.205"></a>
-<span class="sourceLineNo">206</span>  /**<a name="line.206"></a>
-<span class="sourceLineNo">207</span>   * @return the znode(s) to watch<a name="line.207"></a>
-<span class="sourceLineNo">208</span>   */<a name="line.208"></a>
-<span class="sourceLineNo">209</span>  abstract Collection&lt;String&gt; getNodesToWatch();<a name="line.209"></a>
-<span class="sourceLineNo">210</span><a name="line.210"></a>
-<span class="sourceLineNo">211</span>  /**<a name="line.211"></a>
-<span class="sourceLineNo">212</span>   * Thread to synchronize znode data to client ZK cluster<a name="line.212"></a>
-<span class="sourceLineNo">213</span>   */<a name="line.213"></a>
-<span class="sourceLineNo">214</span>  class ClientZkUpdater extends Thread {<a name="line.214"></a>
-<span class="sourceLineNo">215</span>    final String znode;<a name="line.215"></a>
-<span class="sourceLineNo">216</span>    final BlockingQueue&lt;byte[]&gt; queue;<a name="line.216"></a>
-<span class="sourceLineNo">217</span><a name="line.217"></a>
-<span class="sourceLineNo">218</span>    public ClientZkUpdater(String znode, BlockingQueue&lt;byte[]&gt; queue) {<a name="line.218"></a>
-<span class="sourceLineNo">219</span>      this.znode = znode;<a name="line.219"></a>
-<span class="sourceLineNo">220</span>      this.queue = queue;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>      setName("ClientZKUpdater-" + znode);<a name="line.221"></a>
-<span class="sourceLineNo">222</span>    }<a name="line.222"></a>
-<span class="sourceLineNo">223</span><a name="line.223"></a>
-<span class="sourceLineNo">224</span>    @Override<a name="line.224"></a>
-<span class="sourceLineNo">225</span>    public void run() {<a name="line.225"></a>
-<span class="sourceLineNo">226</span>      while (!server.isStopped()) {<a name="line.226"></a>
-<span class="sourceLineNo">227</span>        try {<a name="line.227"></a>
-<span class="sourceLineNo">228</span>          byte[] data = queue.take();<a name="line.228"></a>
-<span class="sourceLineNo">229</span>          setDataForClientZkUntilSuccess(znode, data);<a name="line.229"></a>
-<span class="sourceLineNo">230</span>        } catch (InterruptedException e) {<a name="line.230"></a>
-<span class="sourceLineNo">231</span>          if (LOG.isDebugEnabled()) {<a name="line.231"></a>
-<span class="sourceLineNo">232</span>            LOG.debug(<a name="line.232"></a>
-<span class="sourceLineNo">233</span>              "Interrupted while checking whether need to update meta location to client zk");<a name="line.233"></a>
-<span class="sourceLineNo">234</span>          }<a name="line.234"></a>
-<span class="sourceLineNo">235</span>          Thread.currentThread().interrupt();<a name="line.235"></a>
-<span class="sourceLineNo">236</span>          break;<a name="line.236"></a>
-<span class="sourceLineNo">237</span>        }<a name="line.237"></a>
-<span class="sourceLineNo">238</span>      }<a name="line.238"></a>
-<span class="sourceLineNo">239</span>    }<a name="line.239"></a>
-<span class="sourceLineNo">240</span>  }<a name="line.240"></a>
-<span class="sourceLineNo">241</span>}<a name="line.241"></a>
+<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.HConstants;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.Server;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.zookeeper.ZKListener;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.zookeeper.ZKUtil;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.zookeeper.ZKWatcher;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.zookeeper.CreateMode;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.zookeeper.KeeperException;<a name="line.36"></a>
+<span class="sourceLineNo">037</span><a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.slf4j.Logger;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.slf4j.LoggerFactory;<a name="line.39"></a>
+<span class="sourceLineNo">040</span><a name="line.40"></a>
+<span class="sourceLineNo">041</span>/**<a name="line.41"></a>
+<span class="sourceLineNo">042</span> * Tracks the target znode(s) on server ZK cluster and synchronize them to client ZK cluster if<a name="line.42"></a>
+<span class="sourceLineNo">043</span> * changed<a name="line.43"></a>
+<span class="sourceLineNo">044</span> * &lt;p/&gt;<a name="line.44"></a>
+<span class="sourceLineNo">045</span> * The target znode(s) is given through {@link #getNodesToWatch()} method<a name="line.45"></a>
+<span class="sourceLineNo">046</span> */<a name="line.46"></a>
+<span class="sourceLineNo">047</span>@InterfaceAudience.Private<a name="line.47"></a>
+<span class="sourceLineNo">048</span>public abstract class ClientZKSyncer extends ZKListener {<a name="line.48"></a>
+<span class="sourceLineNo">049</span>  private static final Logger LOG = LoggerFactory.getLogger(ClientZKSyncer.class);<a name="line.49"></a>
+<span class="sourceLineNo">050</span>  private final Server server;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>  private final ZKWatcher clientZkWatcher;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>  // We use queues and daemon threads to synchronize the data to client ZK cluster<a name="line.52"></a>
+<span class="sourceLineNo">053</span>  // to avoid blocking the single event thread for watchers<a name="line.53"></a>
+<span class="sourceLineNo">054</span>  private final Map&lt;String, BlockingQueue&lt;byte[]&gt;&gt; queues;<a name="line.54"></a>
+<span class="sourceLineNo">055</span><a name="line.55"></a>
+<span class="sourceLineNo">056</span>  public ClientZKSyncer(ZKWatcher watcher, ZKWatcher clientZkWatcher, Server server) {<a name="line.56"></a>
+<span class="sourceLineNo">057</span>    super(watcher);<a name="line.57"></a>
+<span class="sourceLineNo">058</span>    this.server = server;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>    this.clientZkWatcher = clientZkWatcher;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>    this.queues = new HashMap&lt;&gt;();<a name="line.60"></a>
+<span class="sourceLineNo">061</span>  }<a name="line.61"></a>
+<span class="sourceLineNo">062</span><a name="line.62"></a>
+<span class="sourceLineNo">063</span>  /**<a name="line.63"></a>
+<span class="sourceLineNo">064</span>   * Starts the syncer<a name="line.64"></a>
+<span class="sourceLineNo">065</span>   * @throws KeeperException if error occurs when trying to create base nodes on client ZK<a name="line.65"></a>
+<span class="sourceLineNo">066</span>   */<a name="line.66"></a>
+<span class="sourceLineNo">067</span>  public void start() throws KeeperException {<a name="line.67"></a>
+<span class="sourceLineNo">068</span>    LOG.debug("Starting " + getClass().getSimpleName());<a name="line.68"></a>
+<span class="sourceLineNo">069</span>    this.watcher.registerListener(this);<a name="line.69"></a>
+<span class="sourceLineNo">070</span>    // create base znode on remote ZK<a name="line.70"></a>
+<span class="sourceLineNo">071</span>    ZKUtil.createWithParents(clientZkWatcher, watcher.znodePaths.baseZNode);<a name="line.71"></a>
+<span class="sourceLineNo">072</span>    // set meta znodes for client ZK<a name="line.72"></a>
+<span class="sourceLineNo">073</span>    Collection&lt;String&gt; nodes = getNodesToWatch();<a name="line.73"></a>
+<span class="sourceLineNo">074</span>    LOG.debug("Znodes to watch: " + nodes);<a name="line.74"></a>
+<span class="sourceLineNo">075</span>    // initialize queues and threads<a name="line.75"></a>
+<span class="sourceLineNo">076</span>    for (String node : nodes) {<a name="line.76"></a>
+<span class="sourceLineNo">077</span>      BlockingQueue&lt;byte[]&gt; queue = new ArrayBlockingQueue&lt;&gt;(1);<a name="line.77"></a>
+<span class="sourceLineNo">078</span>      queues.put(node, queue);<a name="line.78"></a>
+<span class="sourceLineNo">079</span>      Thread updater = new ClientZkUpdater(node, queue);<a name="line.79"></a>
+<span class="sourceLineNo">080</span>      updater.setDaemon(true);<a name="line.80"></a>
+<span class="sourceLineNo">081</span>      updater.start();<a name="line.81"></a>
+<span class="sourceLineNo">082</span>      watchAndCheckExists(node);<a name="line.82"></a>
+<span class="sourceLineNo">083</span>    }<a name="line.83"></a>
+<span class="sourceLineNo">084</span>  }<a name="line.84"></a>
+<span class="sourceLineNo">085</span><a name="line.85"></a>
+<span class="sourceLineNo">086</span>  private void watchAndCheckExists(String node) {<a name="line.86"></a>
+<span class="sourceLineNo">087</span>    try {<a name="line.87"></a>
+<span class="sourceLineNo">088</span>      if (ZKUtil.watchAndCheckExists(watcher, node)) {<a name="line.88"></a>
+<span class="sourceLineNo">089</span>        byte[] data = ZKUtil.getDataAndWatch(watcher, node);<a name="line.89"></a>
+<span class="sourceLineNo">090</span>        if (data != null) {<a name="line.90"></a>
+<span class="sourceLineNo">091</span>          // put the data into queue<a name="line.91"></a>
+<span class="sourceLineNo">092</span>          upsertQueue(node, data);<a name="line.92"></a>
+<span class="sourceLineNo">093</span>        } else {<a name="line.93"></a>
+<span class="sourceLineNo">094</span>          // It existed but now does not, should has been tracked by our watcher, ignore<a name="line.94"></a>
+<span class="sourceLineNo">095</span>          LOG.debug("Found no data from " + node);<a name="line.95"></a>
+<span class="sourceLineNo">096</span>          watchAndCheckExists(node);<a name="line.96"></a>
+<span class="sourceLineNo">097</span>        }<a name="line.97"></a>
+<span class="sourceLineNo">098</span>      } else {<a name="line.98"></a>
+<span class="sourceLineNo">099</span>        // cleanup stale ZNodes on client ZK to avoid invalid requests to server<a name="line.99"></a>
+<span class="sourceLineNo">100</span>        ZKUtil.deleteNodeFailSilent(clientZkWatcher, node);<a name="line.100"></a>
+<span class="sourceLineNo">101</span>      }<a name="line.101"></a>
+<span class="sourceLineNo">102</span>    } catch (KeeperException e) {<a name="line.102"></a>
+<span class="sourceLineNo">103</span>      server.abort("Unexpected exception during initialization, aborting", e);<a name="line.103"></a>
+<span class="sourceLineNo">104</span>    }<a name="line.104"></a>
+<span class="sourceLineNo">105</span>  }<a name="line.105"></a>
+<span class="sourceLineNo">106</span><a name="line.106"></a>
+<span class="sourceLineNo">107</span>  /**<a name="line.107"></a>
+<span class="sourceLineNo">108</span>   * Update the value of the single element in queue if any, or else insert.<a name="line.108"></a>
+<span class="sourceLineNo">109</span>   * &lt;p/&gt;<a name="line.109"></a>
+<span class="sourceLineNo">110</span>   * We only need to synchronize the latest znode value to client ZK rather than synchronize each<a name="line.110"></a>
+<span class="sourceLineNo">111</span>   * time<a name="line.111"></a>
+<span class="sourceLineNo">112</span>   * @param data the data to write to queue<a name="line.112"></a>
+<span class="sourceLineNo">113</span>   */<a name="line.113"></a>
+<span class="sourceLineNo">114</span>  private void upsertQueue(String node, byte[] data) {<a name="line.114"></a>
+<span class="sourceLineNo">115</span>    BlockingQueue&lt;byte[]&gt; queue = queues.get(node);<a name="line.115"></a>
+<span class="sourceLineNo">116</span>    synchronized (queue) {<a name="line.116"></a>
+<span class="sourceLineNo">117</span>      queue.poll();<a name="line.117"></a>
+<span class="sourceLineNo">118</span>      queue.offer(data);<a name="line.118"></a>
+<span class="sourceLineNo">119</span>    }<a name="line.119"></a>
+<span class="sourceLineNo">120</span>  }<a name="line.120"></a>
+<span class="sourceLineNo">121</span><a name="line.121"></a>
+<span class="sourceLineNo">122</span>  /**<a name="line.122"></a>
+<span class="sourceLineNo">123</span>   * Set data for client ZK and retry until succeed. Be very careful to prevent dead loop when<a name="line.123"></a>
+<span class="sourceLineNo">124</span>   * modifying this method<a name="line.124"></a>
+<span class="sourceLineNo">125</span>   * @param node the znode to set on client ZK<a name="line.125"></a>
+<span class="sourceLineNo">126</span>   * @param data the data to set to client ZK<a name="line.126"></a>
+<span class="sourceLineNo">127</span>   * @throws InterruptedException if the thread is interrupted during process<a name="line.127"></a>
+<span class="sourceLineNo">128</span>   */<a name="line.128"></a>
+<span class="sourceLineNo">129</span>  private final void setDataForClientZkUntilSuccess(String node, byte[] data)<a name="line.129"></a>
+<span class="sourceLineNo">130</span>      throws InterruptedException {<a name="line.130"></a>
+<span class="sourceLineNo">131</span>    while (!server.isStopped()) {<a name="line.131"></a>
+<span class="sourceLineNo">132</span>      try {<a name="line.132"></a>
+<span class="sourceLineNo">133</span>        LOG.debug("Set data for remote " + node + ", client zk wather: " + clientZkWatcher);<a name="line.133"></a>
+<span class="sourceLineNo">134</span>        ZKUtil.setData(clientZkWatcher, node, data);<a name="line.134"></a>
+<span class="sourceLineNo">135</span>        break;<a name="line.135"></a>
+<span class="sourceLineNo">136</span>      } catch (KeeperException.NoNodeException nne) {<a name="line.136"></a>
+<span class="sourceLineNo">137</span>        // Node doesn't exist, create it and set value<a name="line.137"></a>
+<span class="sourceLineNo">138</span>        try {<a name="line.138"></a>
+<span class="sourceLineNo">139</span>          ZKUtil.createNodeIfNotExistsNoWatch(clientZkWatcher, node, data, CreateMode.PERSISTENT);<a name="line.139"></a>
+<span class="sourceLineNo">140</span>          break;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>        } catch (KeeperException.ConnectionLossException<a name="line.141"></a>
+<span class="sourceLineNo">142</span>            | KeeperException.SessionExpiredException ee) {<a name="line.142"></a>
+<span class="sourceLineNo">143</span>          reconnectAfterExpiration();<a name="line.143"></a>
+<span class="sourceLineNo">144</span>        } catch (KeeperException e) {<a name="line.144"></a>
+<span class="sourceLineNo">145</span>          LOG.warn(<a name="line.145"></a>
+<span class="sourceLineNo">146</span>            "Failed to create znode " + node + " due to: " + e.getMessage() + ", will retry later");<a name="line.146"></a>
+<span class="sourceLineNo">147</span>        }<a name="line.147"></a>
+<span class="sourceLineNo">148</span>      } catch (KeeperException.ConnectionLossException<a name="line.148"></a>
+<span class="sourceLineNo">149</span>          | KeeperException.SessionExpiredException ee) {<a name="line.149"></a>
+<span class="sourceLineNo">150</span>        reconnectAfterExpiration();<a name="line.150"></a>
+<span class="sourceLineNo">151</span>      } catch (KeeperException e) {<a name="line.151"></a>
+<span class="sourceLineNo">152</span>        LOG.debug("Failed to set data to client ZK, will retry later", e);<a name="line.152"></a>
+<span class="sourceLineNo">153</span>      }<a name="line.153"></a>
+<span class="sourceLineNo">154</span>      Threads.sleep(HConstants.SOCKET_RETRY_WAIT_MS);<a name="line.154"></a>
+<span class="sourceLineNo">155</span>    }<a name="line.155"></a>
+<span class="sourceLineNo">156</span>  }<a name="line.156"></a>
+<span class="sourceLineNo">157</span><a name="line.157"></a>
+<span class="sourceLineNo">158</span>  private final void reconnectAfterExpiration() throws InterruptedException {<a name="line.158"></a>
+<span class="sourceLineNo">159</span>    LOG.warn("ZK session expired or lost. Retry a new connection...");<a name="line.159"></a>
+<span class="sourceLineNo">160</span>    try {<a name="line.160"></a>
+<span class="sourceLineNo">161</span>      clientZkWatcher.reconnectAfterExpiration();<a name="line.161"></a>
+<span class="sourceLineNo">162</span>    } catch (IOException | KeeperException e) {<a name="line.162"></a>
+<span class="sourceLineNo">163</span>      LOG.warn("Failed to reconnect to client zk after session expiration, will retry later", e);<a name="line.163"></a>
+<span class="sourceLineNo">164</span>    }<a name="line.164"></a>
+<span class="sourceLineNo">165</span>  }<a name="line.165"></a>
+<span class="sourceLineNo">166</span><a name="line.166"></a>
+<span class="sourceLineNo">167</span>  @Override<a name="line.167"></a>
+<span class="sourceLineNo">168</span>  public void nodeCreated(String path) {<a name="line.168"></a>
+<span class="sourceLineNo">169</span>    if (!validate(path)) {<a name="line.169"></a>
+<span class="sourceLineNo">170</span>      return;<a name="line.170"></a>
+<span class="sourceLineNo">171</span>    }<a name="line.171"></a>
+<span class="sourceLineNo">172</span>    try {<a name="line.172"></a>
+<span class="sourceLineNo">173</span>      byte[] data = ZKUtil.getDataAndWatch(watcher, path);<a name="line.173"></a>
+<span class="sourceLineNo">174</span>      upsertQueue(path, data);<a name="line.174"></a>
+<span class="sourceLineNo">175</span>    } catch (KeeperException e) {<a name="line.175"></a>
+<span class="sourceLineNo">176</span>      LOG.warn("Unexpected exception handling nodeCreated event", e);<a name="line.176"></a>
+<span class="sourceLineNo">177</span>    }<a name="line.177"></a>
+<span class="sourceLineNo">178</span>  }<a name="line.178"></a>
+<span class="sourceLineNo">179</span><a name="line.179"></a>
+<span class="sourceLineNo">180</span>  @Override<a name="line.180"></a>
+<span class="sourceLineNo">181</span>  public void nodeDataChanged(String path) {<a name="line.181"></a>
+<span class="sourceLineNo">182</span>    if (validate(path)) {<a name="line.182"></a>
+<span class="sourceLineNo">183</span>      nodeCreated(path);<a name="line.183"></a>
+<span class="sourceLineNo">184</span>    }<a name="line.184"></a>
+<span class="sourceLineNo">185</span>  }<a name="line.185"></a>
+<span class="sourceLineNo">186</span><a name="line.186"></a>
+<span class="sourceLineNo">187</span>  @Override<a name="line.187"></a>
+<span class="sourceLineNo">188</span>  public synchronized void nodeDeleted(String path) {<a name="line.188"></a>
+<span class="sourceLineNo">189</span>    if (validate(path)) {<a name="line.189"></a>
+<span class="sourceLineNo">190</span>      try {<a name="line.190"></a>
+<span class="sourceLineNo">191</span>        if (ZKUtil.watchAndCheckExists(watcher, path)) {<a name="line.191"></a>
+<span class="sourceLineNo">192</span>          nodeCreated(path);<a name="line.192"></a>
+<span class="sourceLineNo">193</span>        }<a name="line.193"></a>
+<span class="sourceLineNo">194</span>      } catch (KeeperException e) {<a name="line.194"></a>
+<span class="sourceLineNo">195</span>        LOG.warn("Unexpected exception handling nodeDeleted event for path: " + path, e);<a name="line.195"></a>
+<span class="sourceLineNo">196</span>      }<a name="line.196"></a>
+<span class="sourceLineNo">197</span>    }<a name="line.197"></a>
+<span class="sourceLineNo">198</span>  }<a name="line.198"></a>
+<span class="sourceLineNo">199</span><a name="line.199"></a>
+<span class="sourceLineNo">200</span>  /**<a name="line.200"></a>
+<span class="sourceLineNo">201</span>   * Validate whether a znode path is watched by us<a name="line.201"></a>
+<span class="sourceLineNo">202</span>   * @param path the path to validate<a name="line.202"></a>
+<span class="sourceLineNo">203</span>   * @return true if the znode is watched by us<a name="line.203"></a>
+<span class="sourceLineNo">204</span>   */<a name="line.204"></a>
+<span class="sourceLineNo">205</span>  abstract boolean validate(String path);<a name="line.205"></a>
+<span class="sourceLineNo">206</span><a name="line.206"></a>
+<span class="sourceLineNo">207</span>  /**<a name="line.207"></a>
+<span class="sourceLineNo">208</span>   * @return the znode(s) to watch<a name="line.208"></a>
+<span class="sourceLineNo">209</span>   */<a name="line.209"></a>
+<span class="sourceLineNo">210</span>  abstract Collection&lt;String&gt; getNodesToWatch();<a name="line.210"></a>
+<span class="sourceLineNo">211</span><a name="line.211"></a>
+<span class="sourceLineNo">212</span>  /**<a name="line.212"></a>
+<span class="sourceLineNo">213</span>   * Thread to synchronize znode data to client ZK cluster<a name="line.213"></a>
+<span class="sourceLineNo">214</span>   */<a name="line.214"></a>
+<span class="sourceLineNo">215</span>  class ClientZkUpdater extends Thread {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>    final String znode;<a name="line.216"></a>
+<span class="sourceLineNo">217</span>    final BlockingQueue&lt;byte[]&gt; queue;<a name="line.217"></a>
+<span class="sourceLineNo">218</span><a name="line.218"></a>
+<span class="sourceLineNo">219</span>    public ClientZkUpdater(String znode, BlockingQueue&lt;byte[]&gt; queue) {<a name="line.219"></a>
+<span class="sourceLineNo">220</span>      this.znode = znode;<a name="line.220"></a>
+<span class="sourceLineNo">221</span>      this.queue = queue;<a name="line.221"></a>
+<span class="sourceLineNo">222</span>      setName("ClientZKUpdater-" + znode);<a name="line.222"></a>
+<span class="sourceLineNo">223</span>    }<a name="line.223"></a>
+<span class="sourceLineNo">224</span><a name="line.224"></a>
+<span class="sourceLineNo">225</span>    @Override<a name="line.225"></a>
+<span class="sourceLineNo">226</span>    public void run() {<a name="line.226"></a>
+<span class="sourceLineNo">227</span>      while (!server.isStopped()) {<a name="line.227"></a>
+<span class="sourceLineNo">228</span>        try {<a name="line.228"></a>
+<span class="sourceLineNo">229</span>          byte[] data = queue.take();<a name="line.229"></a>
+<span class="sourceLineNo">230</span>          setDataForClientZkUntilSuccess(znode, data);<a name="line.230"></a>
+<span class="sourceLineNo">231</span>        } catch (InterruptedException e) {<a name="line.231"></a>
+<span class="sourceLineNo">232</span>          if (LOG.isDebugEnabled()) {<a name="line.232"></a>
+<span class="sourceLineNo">233</span>            LOG.debug(<a name="line.233"></a>
+<span class="sourceLineNo">234</span>              "Interrupted while checking whether need to update meta location to client zk");<a name="line.234"></a>
+<span class="sourceLineNo">235</span>          }<a name="line.235"></a>
+<span class="sourceLineNo">236</span>          Thread.currentThread().interrupt();<a name="line.236"></a>
+<span class="sourceLineNo">237</span>          break;<a name="line.237"></a>
+<span class="sourceLineNo">238</span>        }<a name="line.238"></a>
+<span class="sourceLineNo">239</span>      }<a name="line.239"></a>
+<span class="sourceLineNo">240</span>    }<a name="line.240"></a>
+<span class="sourceLineNo">241</span>  }<a name="line.241"></a>
+<span class="sourceLineNo">242</span>}<a name="line.242"></a>
 
 
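The upsertQueue logic above is a "latest value wins" mailbox: a capacity-1 ArrayBlockingQueue whose
producer replaces any pending element, so the updater thread only ever writes the newest znode data
to the client ZK. A standalone sketch of the pattern (class and values invented for illustration):

  import java.util.concurrent.ArrayBlockingQueue;
  import java.util.concurrent.BlockingQueue;

  public class LatestValueMailbox {
    private final BlockingQueue<byte[]> queue = new ArrayBlockingQueue<>(1);

    /** Replace the pending element, if any, so only the newest value is kept. */
    public void publish(byte[] data) {
      synchronized (queue) {
        queue.poll();      // drop any stale, not-yet-consumed value
        queue.offer(data); // always succeeds: the single slot is now free
      }
    }

    public byte[] take() throws InterruptedException {
      return queue.take();
    }

    public static void main(String[] args) throws InterruptedException {
      LatestValueMailbox mailbox = new LatestValueMailbox();
      mailbox.publish("v1".getBytes());
      mailbox.publish("v2".getBytes()); // v1 is dropped before any consumer sees it
      System.out.println(new String(mailbox.take())); // prints v2
    }
  }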
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/net/Address.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/net/Address.html b/devapidocs/src-html/org/apache/hadoop/hbase/net/Address.html
index 66aba38..c893aec 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/net/Address.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/net/Address.html
@@ -25,7 +25,7 @@
 <span class="sourceLineNo">017</span> */<a name="line.17"></a>
 <span class="sourceLineNo">018</span>package org.apache.hadoop.hbase.net;<a name="line.18"></a>
 <span class="sourceLineNo">019</span><a name="line.19"></a>
-<span class="sourceLineNo">020</span>import org.apache.commons.lang.StringUtils;<a name="line.20"></a>
+<span class="sourceLineNo">020</span>import org.apache.commons.lang3.StringUtils;<a name="line.20"></a>
 <span class="sourceLineNo">021</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.21"></a>
 <span class="sourceLineNo">022</span><a name="line.22"></a>
 <span class="sourceLineNo">023</span>import org.apache.hbase.thirdparty.com.google.common.net.HostAndPort;<a name="line.23"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierFactoryImpl.CacheKey.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierFactoryImpl.CacheKey.html b/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierFactoryImpl.CacheKey.html
index 8d7d4b7..30fe071 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierFactoryImpl.CacheKey.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierFactoryImpl.CacheKey.html
@@ -27,7 +27,7 @@
 <span class="sourceLineNo">019</span>import java.util.Objects;<a name="line.19"></a>
 <span class="sourceLineNo">020</span>import java.util.concurrent.ConcurrentHashMap;<a name="line.20"></a>
 <span class="sourceLineNo">021</span><a name="line.21"></a>
-<span class="sourceLineNo">022</span>import org.apache.commons.lang.builder.HashCodeBuilder;<a name="line.22"></a>
+<span class="sourceLineNo">022</span>import org.apache.commons.lang3.builder.HashCodeBuilder;<a name="line.22"></a>
 <span class="sourceLineNo">023</span>import org.apache.hadoop.conf.Configuration;<a name="line.23"></a>
 <span class="sourceLineNo">024</span>import org.apache.hadoop.fs.FileSystem;<a name="line.24"></a>
 <span class="sourceLineNo">025</span>import org.apache.hadoop.hbase.TableName;<a name="line.25"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierFactoryImpl.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierFactoryImpl.html b/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierFactoryImpl.html
index 8d7d4b7..30fe071 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierFactoryImpl.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierFactoryImpl.html
@@ -27,7 +27,7 @@
 <span class="sourceLineNo">019</span>import java.util.Objects;<a name="line.19"></a>
 <span class="sourceLineNo">020</span>import java.util.concurrent.ConcurrentHashMap;<a name="line.20"></a>
 <span class="sourceLineNo">021</span><a name="line.21"></a>
-<span class="sourceLineNo">022</span>import org.apache.commons.lang.builder.HashCodeBuilder;<a name="line.22"></a>
+<span class="sourceLineNo">022</span>import org.apache.commons.lang3.builder.HashCodeBuilder;<a name="line.22"></a>
 <span class="sourceLineNo">023</span>import org.apache.hadoop.conf.Configuration;<a name="line.23"></a>
 <span class="sourceLineNo">024</span>import org.apache.hadoop.fs.FileSystem;<a name="line.24"></a>
 <span class="sourceLineNo">025</span>import org.apache.hadoop.hbase.TableName;<a name="line.25"></a>


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.SinkWriter.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.SinkWriter.html b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.SinkWriter.html
index 9f6a1bd..99f53c4 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.SinkWriter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.SinkWriter.html
@@ -54,55 +54,55 @@
 <span class="sourceLineNo">046</span>import java.util.concurrent.atomic.AtomicReference;<a name="line.46"></a>
 <span class="sourceLineNo">047</span>import java.util.regex.Matcher;<a name="line.47"></a>
 <span class="sourceLineNo">048</span>import java.util.regex.Pattern;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.commons.collections.CollectionUtils;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.commons.collections.MapUtils;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.commons.lang3.ArrayUtils;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.conf.Configuration;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.FileAlreadyExistsException;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.FileStatus;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.FileSystem;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.fs.Path;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.fs.PathFilter;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.Cell;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.HConstants;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.TableName;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.client.Put;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.log.HBaseMarkers;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.master.SplitLogManager;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.regionserver.LastSequenceId;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.wal.WAL.Entry;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.wal.WAL.Reader;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.wal.WALProvider.Writer;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.slf4j.Logger;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.slf4j.LoggerFactory;<a name="line.92"></a>
-<span class="sourceLineNo">093</span><a name="line.93"></a>
-<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.97"></a>
+<span class="sourceLineNo">049</span>import org.apache.commons.lang3.ArrayUtils;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.conf.Configuration;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.fs.FileAlreadyExistsException;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.fs.FileStatus;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.FileSystem;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.Path;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.PathFilter;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.Cell;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.HConstants;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.TableName;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.client.Put;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.log.HBaseMarkers;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.master.SplitLogManager;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.regionserver.LastSequenceId;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.wal.WAL.Entry;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.wal.WAL.Reader;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.wal.WALProvider.Writer;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.slf4j.Logger;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.slf4j.LoggerFactory;<a name="line.90"></a>
+<span class="sourceLineNo">091</span><a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.MapUtils;<a name="line.97"></a>
 <span class="sourceLineNo">098</span><a name="line.98"></a>
 <span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry;<a name="line.99"></a>
 <span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType;<a name="line.100"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.WriterAndPath.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.WriterAndPath.html b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.WriterAndPath.html
index 9f6a1bd..99f53c4 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.WriterAndPath.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.WriterAndPath.html
@@ -54,55 +54,55 @@
 <span class="sourceLineNo">046</span>import java.util.concurrent.atomic.AtomicReference;<a name="line.46"></a>
 <span class="sourceLineNo">047</span>import java.util.regex.Matcher;<a name="line.47"></a>
 <span class="sourceLineNo">048</span>import java.util.regex.Pattern;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.commons.collections.CollectionUtils;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.commons.collections.MapUtils;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.commons.lang3.ArrayUtils;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.conf.Configuration;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.FileAlreadyExistsException;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.FileStatus;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.FileSystem;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.fs.Path;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.fs.PathFilter;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.Cell;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.HConstants;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.TableName;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.client.Put;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.log.HBaseMarkers;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.master.SplitLogManager;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.regionserver.LastSequenceId;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.wal.WAL.Entry;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.wal.WAL.Reader;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.wal.WALProvider.Writer;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.slf4j.Logger;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.slf4j.LoggerFactory;<a name="line.92"></a>
-<span class="sourceLineNo">093</span><a name="line.93"></a>
-<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.97"></a>
+<span class="sourceLineNo">049</span>import org.apache.commons.lang3.ArrayUtils;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.conf.Configuration;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.fs.FileAlreadyExistsException;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.fs.FileStatus;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.FileSystem;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.Path;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.PathFilter;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.Cell;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.HConstants;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.TableName;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.client.Put;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.log.HBaseMarkers;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.master.SplitLogManager;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.regionserver.LastSequenceId;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.wal.WAL.Entry;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.wal.WAL.Reader;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.wal.WALProvider.Writer;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.slf4j.Logger;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.slf4j.LoggerFactory;<a name="line.90"></a>
+<span class="sourceLineNo">091</span><a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.MapUtils;<a name="line.97"></a>
 <span class="sourceLineNo">098</span><a name="line.98"></a>
 <span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry;<a name="line.99"></a>
 <span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType;<a name="line.100"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.WriterThread.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.WriterThread.html b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.WriterThread.html
index 9f6a1bd..99f53c4 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.WriterThread.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.WriterThread.html
@@ -54,55 +54,55 @@
 <span class="sourceLineNo">046</span>import java.util.concurrent.atomic.AtomicReference;<a name="line.46"></a>
 <span class="sourceLineNo">047</span>import java.util.regex.Matcher;<a name="line.47"></a>
 <span class="sourceLineNo">048</span>import java.util.regex.Pattern;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.commons.collections.CollectionUtils;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.commons.collections.MapUtils;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.commons.lang3.ArrayUtils;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.conf.Configuration;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.FileAlreadyExistsException;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.FileStatus;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.FileSystem;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.fs.Path;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.fs.PathFilter;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.Cell;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.HConstants;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.TableName;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.client.Put;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.log.HBaseMarkers;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.master.SplitLogManager;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.regionserver.LastSequenceId;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.wal.WAL.Entry;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.wal.WAL.Reader;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.wal.WALProvider.Writer;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.slf4j.Logger;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.slf4j.LoggerFactory;<a name="line.92"></a>
-<span class="sourceLineNo">093</span><a name="line.93"></a>
-<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.97"></a>
+<span class="sourceLineNo">049</span>import org.apache.commons.lang3.ArrayUtils;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.conf.Configuration;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.fs.FileAlreadyExistsException;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.fs.FileStatus;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.FileSystem;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.Path;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.PathFilter;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.Cell;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.HConstants;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.TableName;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.client.Put;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.log.HBaseMarkers;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.master.SplitLogManager;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.regionserver.LastSequenceId;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.wal.WAL.Entry;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.wal.WAL.Reader;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.wal.WALProvider.Writer;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.slf4j.Logger;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.slf4j.LoggerFactory;<a name="line.90"></a>
+<span class="sourceLineNo">091</span><a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.MapUtils;<a name="line.97"></a>
 <span class="sourceLineNo">098</span><a name="line.98"></a>
 <span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry;<a name="line.99"></a>
 <span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType;<a name="line.100"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.html b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.html
index 9f6a1bd..99f53c4 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.html
@@ -54,55 +54,55 @@
 <span class="sourceLineNo">046</span>import java.util.concurrent.atomic.AtomicReference;<a name="line.46"></a>
 <span class="sourceLineNo">047</span>import java.util.regex.Matcher;<a name="line.47"></a>
 <span class="sourceLineNo">048</span>import java.util.regex.Pattern;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.commons.collections.CollectionUtils;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.commons.collections.MapUtils;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.commons.lang3.ArrayUtils;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.conf.Configuration;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.FileAlreadyExistsException;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.FileStatus;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.FileSystem;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.fs.Path;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.fs.PathFilter;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.Cell;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.HConstants;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.TableName;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.client.Put;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.log.HBaseMarkers;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.master.SplitLogManager;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.regionserver.LastSequenceId;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.wal.WAL.Entry;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.wal.WAL.Reader;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.wal.WALProvider.Writer;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.slf4j.Logger;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.slf4j.LoggerFactory;<a name="line.92"></a>
-<span class="sourceLineNo">093</span><a name="line.93"></a>
-<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.97"></a>
+<span class="sourceLineNo">049</span>import org.apache.commons.lang3.ArrayUtils;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.conf.Configuration;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.fs.FileAlreadyExistsException;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.fs.FileStatus;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.FileSystem;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.Path;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.PathFilter;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.Cell;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.HConstants;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.TableName;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.client.Put;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.log.HBaseMarkers;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.master.SplitLogManager;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.regionserver.LastSequenceId;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.wal.WAL.Entry;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.wal.WAL.Reader;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.wal.WALProvider.Writer;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.slf4j.Logger;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.slf4j.LoggerFactory;<a name="line.90"></a>
+<span class="sourceLineNo">091</span><a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.MapUtils;<a name="line.97"></a>
 <span class="sourceLineNo">098</span><a name="line.98"></a>
 <span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry;<a name="line.99"></a>
 <span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType;<a name="line.100"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/export_control.html
----------------------------------------------------------------------
diff --git a/export_control.html b/export_control.html
index e02ad4e..84f037c 100644
--- a/export_control.html
+++ b/export_control.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180419" />
+    <meta name="Date-Revision-yyyymmdd" content="20180420" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; 
       Export Control
@@ -331,7 +331,7 @@ for more details.</p>
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-04-19</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-04-20</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/index.html
----------------------------------------------------------------------
diff --git a/index.html b/index.html
index 79e4517..7c1057b 100644
--- a/index.html
+++ b/index.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180419" />
+    <meta name="Date-Revision-yyyymmdd" content="20180420" />
     <meta http-equiv="Content-Language" content="en" />
    <title>Apache HBase &#x2013; Apache HBase™ Home</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -409,7 +409,7 @@ Apache HBase is an open-source, distributed, versioned, non-relational database
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-04-19</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-04-20</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/integration.html
----------------------------------------------------------------------
diff --git a/integration.html b/integration.html
index 8af4390..a17c968 100644
--- a/integration.html
+++ b/integration.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180419" />
+    <meta name="Date-Revision-yyyymmdd" content="20180420" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; CI Management</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -291,7 +291,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-04-19</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-04-20</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/issue-tracking.html
----------------------------------------------------------------------
diff --git a/issue-tracking.html b/issue-tracking.html
index 98e3464..bebda7d 100644
--- a/issue-tracking.html
+++ b/issue-tracking.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180419" />
+    <meta name="Date-Revision-yyyymmdd" content="20180420" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Issue Management</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -288,7 +288,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-04-19</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-04-20</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/license.html
----------------------------------------------------------------------
diff --git a/license.html b/license.html
index afba063..f3d6b45 100644
--- a/license.html
+++ b/license.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180419" />
+    <meta name="Date-Revision-yyyymmdd" content="20180420" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Licenses</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -491,7 +491,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-04-19</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-04-20</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/mail-lists.html
----------------------------------------------------------------------
diff --git a/mail-lists.html b/mail-lists.html
index 3c4260d..124299f 100644
--- a/mail-lists.html
+++ b/mail-lists.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180419" />
+    <meta name="Date-Revision-yyyymmdd" content="20180420" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Mailing Lists</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -341,7 +341,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-04-19</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-04-20</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/metrics.html
----------------------------------------------------------------------
diff --git a/metrics.html b/metrics.html
index 5a60f34..d7dea1b 100644
--- a/metrics.html
+++ b/metrics.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180419" />
+    <meta name="Date-Revision-yyyymmdd" content="20180420" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013;  
       Apache HBase (TM) Metrics
@@ -459,7 +459,7 @@ export HBASE_REGIONSERVER_OPTS=&quot;$HBASE_JMX_OPTS -Dcom.sun.management.jmxrem
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-04-19</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-04-20</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/old_news.html
----------------------------------------------------------------------
diff --git a/old_news.html b/old_news.html
index 70fd4a5..b09b137 100644
--- a/old_news.html
+++ b/old_news.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180419" />
+    <meta name="Date-Revision-yyyymmdd" content="20180420" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; 
       Old Apache HBase (TM) News
@@ -440,7 +440,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-04-19</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-04-20</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/plugin-management.html
----------------------------------------------------------------------
diff --git a/plugin-management.html b/plugin-management.html
index 61cb343..0b3ca28 100644
--- a/plugin-management.html
+++ b/plugin-management.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180419" />
+    <meta name="Date-Revision-yyyymmdd" content="20180420" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Plugin Management</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -440,7 +440,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-04-19</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-04-20</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/plugins.html
----------------------------------------------------------------------
diff --git a/plugins.html b/plugins.html
index 4789590..ee4439d 100644
--- a/plugins.html
+++ b/plugins.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180419" />
+    <meta name="Date-Revision-yyyymmdd" content="20180420" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Plugins</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -375,7 +375,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-04-19</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-04-20</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/poweredbyhbase.html
----------------------------------------------------------------------
diff --git a/poweredbyhbase.html b/poweredbyhbase.html
index 9782cd8..0f0ae76 100644
--- a/poweredbyhbase.html
+++ b/poweredbyhbase.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180419" />
+    <meta name="Date-Revision-yyyymmdd" content="20180420" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Powered By Apache HBase™</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -769,7 +769,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-04-19</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-04-20</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/project-info.html
----------------------------------------------------------------------
diff --git a/project-info.html b/project-info.html
index 009aab9..87c9361 100644
--- a/project-info.html
+++ b/project-info.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180419" />
+    <meta name="Date-Revision-yyyymmdd" content="20180420" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Information</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -335,7 +335,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-04-19</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-04-20</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/project-reports.html
----------------------------------------------------------------------
diff --git a/project-reports.html b/project-reports.html
index 19e4b8e..a409fdd 100644
--- a/project-reports.html
+++ b/project-reports.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180419" />
+    <meta name="Date-Revision-yyyymmdd" content="20180420" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Generated Reports</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -305,7 +305,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-04-19</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-04-20</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/project-summary.html
----------------------------------------------------------------------
diff --git a/project-summary.html b/project-summary.html
index 0ff8532..75a2527 100644
--- a/project-summary.html
+++ b/project-summary.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180419" />
+    <meta name="Date-Revision-yyyymmdd" content="20180420" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Summary</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -331,7 +331,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-04-19</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-04-20</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/pseudo-distributed.html
----------------------------------------------------------------------
diff --git a/pseudo-distributed.html b/pseudo-distributed.html
index 3c1456e..b8e0d32 100644
--- a/pseudo-distributed.html
+++ b/pseudo-distributed.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180419" />
+    <meta name="Date-Revision-yyyymmdd" content="20180420" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013;  
 Running Apache HBase (TM) in pseudo-distributed mode
@@ -308,7 +308,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-04-19</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-04-20</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/replication.html
----------------------------------------------------------------------
diff --git a/replication.html b/replication.html
index fc2e377..9195e99 100644
--- a/replication.html
+++ b/replication.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180419" />
+    <meta name="Date-Revision-yyyymmdd" content="20180420" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; 
       Apache HBase (TM) Replication
@@ -303,7 +303,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-04-19</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-04-20</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/resources.html
----------------------------------------------------------------------
diff --git a/resources.html b/resources.html
index 47ac2b9..aca09b7 100644
--- a/resources.html
+++ b/resources.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180419" />
+    <meta name="Date-Revision-yyyymmdd" content="20180420" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Other Apache HBase (TM) Resources</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -331,7 +331,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-04-19</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-04-20</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/source-repository.html
----------------------------------------------------------------------
diff --git a/source-repository.html b/source-repository.html
index 6309f26..80340d7 100644
--- a/source-repository.html
+++ b/source-repository.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180419" />
+    <meta name="Date-Revision-yyyymmdd" content="20180420" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Source Code Management</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -299,7 +299,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-04-19</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-04-20</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/sponsors.html
----------------------------------------------------------------------
diff --git a/sponsors.html b/sponsors.html
index b4b70ec..b82ad87 100644
--- a/sponsors.html
+++ b/sponsors.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180419" />
+    <meta name="Date-Revision-yyyymmdd" content="20180420" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Apache HBase™ Sponsors</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -333,7 +333,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-04-19</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-04-20</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/supportingprojects.html
----------------------------------------------------------------------
diff --git a/supportingprojects.html b/supportingprojects.html
index 7c8af8e..0dff428 100644
--- a/supportingprojects.html
+++ b/supportingprojects.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180419" />
+    <meta name="Date-Revision-yyyymmdd" content="20180420" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Supporting Projects</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -520,7 +520,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-04-19</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-04-20</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/team-list.html
----------------------------------------------------------------------
diff --git a/team-list.html b/team-list.html
index 830fab3..b80896a 100644
--- a/team-list.html
+++ b/team-list.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180419" />
+    <meta name="Date-Revision-yyyymmdd" content="20180420" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Team</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.5-HBASE.min.css" />
@@ -730,7 +730,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-04-19</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-04-20</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/testdevapidocs/org/apache/hadoop/hbase/TestClusterPortAssignment.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/TestClusterPortAssignment.html b/testdevapidocs/org/apache/hadoop/hbase/TestClusterPortAssignment.html
index a1823f6..0e14e06 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/TestClusterPortAssignment.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/TestClusterPortAssignment.html
@@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";
 <li class="blockList">
 <hr>
 <br>
-<pre>public class <a href="../../../../src-html/org/apache/hadoop/hbase/TestClusterPortAssignment.html#line.35">TestClusterPortAssignment</a>
+<pre>public class <a href="../../../../src-html/org/apache/hadoop/hbase/TestClusterPortAssignment.html#line.36">TestClusterPortAssignment</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 </li>
 </ul>
@@ -134,7 +134,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/TestClusterPortAssignment.html#CLASS_RULE">CLASS_RULE</a></span></code>&nbsp;</td>
 </tr>
 <tr class="rowColor">
-<td class="colFirst"><code>private static org.apache.commons.logging.Log</code></td>
+<td class="colFirst"><code>private static org.slf4j.Logger</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/hadoop/hbase/TestClusterPortAssignment.html#LOG">LOG</a></span></code>&nbsp;</td>
 </tr>
 <tr class="altColor">
@@ -208,7 +208,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>CLASS_RULE</h4>
-<pre>public static final&nbsp;<a href="../../../../org/apache/hadoop/hbase/HBaseClassTestRule.html" title="class in org.apache.hadoop.hbase">HBaseClassTestRule</a> <a href="../../../../src-html/org/apache/hadoop/hbase/TestClusterPortAssignment.html#line.37">CLASS_RULE</a></pre>
+<pre>public static final&nbsp;<a href="../../../../org/apache/hadoop/hbase/HBaseClassTestRule.html" title="class in org.apache.hadoop.hbase">HBaseClassTestRule</a> <a href="../../../../src-html/org/apache/hadoop/hbase/TestClusterPortAssignment.html#line.38">CLASS_RULE</a></pre>
 </li>
 </ul>
 <a name="TEST_UTIL">
@@ -217,7 +217,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>TEST_UTIL</h4>
-<pre>private static final&nbsp;<a href="../../../../org/apache/hadoop/hbase/HBaseTestingUtility.html" title="class in org.apache.hadoop.hbase">HBaseTestingUtility</a> <a href="../../../../src-html/org/apache/hadoop/hbase/TestClusterPortAssignment.html#line.40">TEST_UTIL</a></pre>
+<pre>private static final&nbsp;<a href="../../../../org/apache/hadoop/hbase/HBaseTestingUtility.html" title="class in org.apache.hadoop.hbase">HBaseTestingUtility</a> <a href="../../../../src-html/org/apache/hadoop/hbase/TestClusterPortAssignment.html#line.41">TEST_UTIL</a></pre>
 </li>
 </ul>
 <a name="LOG">
@@ -226,7 +226,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>LOG</h4>
-<pre>private static final&nbsp;org.apache.commons.logging.Log <a href="../../../../src-html/org/apache/hadoop/hbase/TestClusterPortAssignment.html#line.41">LOG</a></pre>
+<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../src-html/org/apache/hadoop/hbase/TestClusterPortAssignment.html#line.42">LOG</a></pre>
 </li>
 </ul>
 </li>
@@ -243,7 +243,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>TestClusterPortAssignment</h4>
-<pre>public&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/TestClusterPortAssignment.html#line.35">TestClusterPortAssignment</a>()</pre>
+<pre>public&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/TestClusterPortAssignment.html#line.36">TestClusterPortAssignment</a>()</pre>
 </li>
 </ul>
 </li>
@@ -260,7 +260,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>testClusterPortAssignment</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/TestClusterPortAssignment.html#line.48">testClusterPortAssignment</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/TestClusterPortAssignment.html#line.49">testClusterPortAssignment</a>()
                                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <div class="block">Check that we can start an HBase cluster specifying a custom set of
  RPC and infoserver ports.</div>


[27/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

Posted by gi...@apache.org.
Published site at 914de1141699142bce1486468a742233d9440b23.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/1facf1d3
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/1facf1d3
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/1facf1d3

Branch: refs/heads/asf-site
Commit: 1facf1d3a2f9f2ac8acde077e1816c9783f88f87
Parents: 12c47ed
Author: jenkins <bu...@apache.org>
Authored: Fri Apr 20 14:46:29 2018 +0000
Committer: jenkins <bu...@apache.org>
Committed: Fri Apr 20 14:46:29 2018 +0000

----------------------------------------------------------------------
 acid-semantics.html                             |     4 +-
 apache_hbase_reference_guide.pdf                | 33165 +++++++++--------
 .../hadoop/hbase/client/RowMutations.html       |    28 +-
 .../hadoop/hbase/client/RowMutations.html       |   317 +-
 .../org/apache/hadoop/hbase/net/Address.html    |     2 +-
 .../hbase/util/Bytes.ByteArrayComparator.html   |    24 +-
 .../hbase/util/Bytes.RowEndKeyComparator.html   |    24 +-
 .../org/apache/hadoop/hbase/util/Bytes.html     |    24 +-
 book.html                                       |   214 +-
 bulk-loads.html                                 |     4 +-
 checkstyle-aggregate.html                       |    10 +-
 coc.html                                        |     4 +-
 dependencies.html                               |     4 +-
 dependency-convergence.html                     |     4 +-
 dependency-info.html                            |     4 +-
 dependency-management.html                      |     4 +-
 devapidocs/constant-values.html                 |     6 +-
 .../hbase/backup/master/BackupLogCleaner.html   |    20 +-
 .../hadoop/hbase/backup/package-tree.html       |     4 +-
 .../hadoop/hbase/client/RowMutations.html       |    32 +-
 .../hadoop/hbase/client/package-tree.html       |    18 +-
 .../hadoop/hbase/executor/package-tree.html     |     2 +-
 .../hadoop/hbase/filter/package-tree.html       |    10 +-
 .../hadoop/hbase/io/hfile/package-tree.html     |     6 +-
 .../apache/hadoop/hbase/ipc/package-tree.html   |     2 +-
 .../hadoop/hbase/mapreduce/package-tree.html    |     4 +-
 .../hbase/master/balancer/package-tree.html     |     2 +-
 .../hadoop/hbase/master/package-tree.html       |     4 +-
 .../hbase/master/procedure/package-tree.html    |     4 +-
 .../ClientZKSyncer.ClientZkUpdater.html         |    10 +-
 .../hbase/master/zksyncer/ClientZKSyncer.html   |    34 +-
 .../org/apache/hadoop/hbase/package-tree.html   |    16 +-
 .../hadoop/hbase/procedure2/package-tree.html   |     4 +-
 ...QuotaSnapshotSizeSerializationException.html |     6 +-
 ...leArchiverNotifierImpl.SnapshotWithSize.html |    18 +-
 ...ArchiverNotifierImpl.StoreFileReference.html |    20 +-
 .../hbase/quotas/FileArchiverNotifierImpl.html  |    58 +-
 .../hbase/quotas/RegionSizeReportingChore.html  |    38 +-
 .../hbase/quotas/RegionSizeStoreImpl.html       |    30 +-
 .../hadoop/hbase/quotas/package-tree.html       |     6 +-
 .../HRegion.FlushResult.Result.html             |     4 +-
 .../hadoop/hbase/regionserver/package-tree.html |    18 +-
 .../regionserver/querymatcher/package-tree.html |     2 +-
 .../throttle/StoreHotnessProtector.html         |    46 +-
 .../hbase/security/access/package-tree.html     |     2 +-
 .../hadoop/hbase/security/package-tree.html     |     2 +-
 .../apache/hadoop/hbase/util/package-tree.html  |    10 +-
 .../apache/hadoop/hbase/wal/package-tree.html   |     2 +-
 .../org/apache/hadoop/hbase/Version.html        |     6 +-
 .../hbase/backup/master/BackupLogCleaner.html   |   235 +-
 .../hadoop/hbase/client/RowMutations.html       |   317 +-
 .../ClientZKSyncer.ClientZkUpdater.html         |   429 +-
 .../hbase/master/zksyncer/ClientZKSyncer.html   |   429 +-
 .../org/apache/hadoop/hbase/net/Address.html    |     2 +-
 ...ileArchiverNotifierFactoryImpl.CacheKey.html |     2 +-
 .../quotas/FileArchiverNotifierFactoryImpl.html |     2 +-
 ...QuotaSnapshotSizeSerializationException.html |  1197 +-
 ...leArchiverNotifierImpl.SnapshotWithSize.html |  1197 +-
 ...ArchiverNotifierImpl.StoreFileReference.html |  1197 +-
 .../hbase/quotas/FileArchiverNotifierImpl.html  |  1197 +-
 .../hbase/quotas/RegionSizeReportingChore.html  |   263 +-
 .../hbase/quotas/RegionSizeStoreImpl.html       |   167 +-
 .../HRegion.BatchOperation.Visitor.html         |   236 +-
 .../regionserver/HRegion.BatchOperation.html    |   236 +-
 .../regionserver/HRegion.BulkLoadListener.html  |   236 +-
 .../HRegion.FlushResult.Result.html             |   236 +-
 .../hbase/regionserver/HRegion.FlushResult.html |   236 +-
 .../regionserver/HRegion.FlushResultImpl.html   |   236 +-
 .../HRegion.MutationBatchOperation.html         |   236 +-
 .../HRegion.ObservedExceptionsInBatch.html      |   236 +-
 .../HRegion.PrepareFlushResult.html             |   236 +-
 .../regionserver/HRegion.RegionScannerImpl.html |   236 +-
 .../HRegion.ReplayBatchOperation.html           |   236 +-
 .../regionserver/HRegion.RowLockContext.html    |   236 +-
 .../hbase/regionserver/HRegion.RowLockImpl.html |   236 +-
 .../hbase/regionserver/HRegion.WriteState.html  |   236 +-
 .../hadoop/hbase/regionserver/HRegion.html      |   236 +-
 .../regionserver/RSRpcServices.LogDelegate.html |   216 +-
 ...SRpcServices.RegionScannerCloseCallBack.html |   216 +-
 .../RSRpcServices.RegionScannerHolder.html      |   216 +-
 ...pcServices.RegionScannerShippedCallBack.html |   216 +-
 ...RpcServices.RegionScannersCloseCallBack.html |   216 +-
 .../RSRpcServices.ScannerListener.html          |   216 +-
 .../hbase/regionserver/RSRpcServices.html       |   216 +-
 .../hadoop/hbase/regionserver/StoreScanner.html |    54 +-
 .../throttle/StoreHotnessProtector.html         |   343 +-
 .../hbase/regionserver/wal/FSWALEntry.html      |    28 +-
 .../replication/ZKReplicationQueueStorage.html  |    48 +-
 .../hbase/util/Bytes.ByteArrayComparator.html   |    24 +-
 .../hadoop/hbase/util/Bytes.Comparer.html       |    24 +-
 ...raphicalComparerHolder.PureJavaComparer.html |    24 +-
 ...ographicalComparerHolder.UnsafeComparer.html |    24 +-
 .../Bytes.LexicographicalComparerHolder.html    |    24 +-
 .../hbase/util/Bytes.RowEndKeyComparator.html   |    24 +-
 .../org/apache/hadoop/hbase/util/Bytes.html     |    24 +-
 ...tter.BoundedLogWriterCreationOutputSink.html |    98 +-
 .../WALSplitter.CorruptedLogFileException.html  |    98 +-
 .../hbase/wal/WALSplitter.EntryBuffers.html     |    98 +-
 ...WALSplitter.LogRecoveredEditsOutputSink.html |    98 +-
 .../hbase/wal/WALSplitter.MutationReplay.html   |    98 +-
 .../hbase/wal/WALSplitter.OutputSink.html       |    98 +-
 .../wal/WALSplitter.PipelineController.html     |    98 +-
 .../wal/WALSplitter.RegionEntryBuffer.html      |    98 +-
 .../hbase/wal/WALSplitter.SinkWriter.html       |    98 +-
 .../hbase/wal/WALSplitter.WriterAndPath.html    |    98 +-
 .../hbase/wal/WALSplitter.WriterThread.html     |    98 +-
 .../apache/hadoop/hbase/wal/WALSplitter.html    |    98 +-
 export_control.html                             |     4 +-
 index.html                                      |     4 +-
 integration.html                                |     4 +-
 issue-tracking.html                             |     4 +-
 license.html                                    |     4 +-
 mail-lists.html                                 |     4 +-
 metrics.html                                    |     4 +-
 old_news.html                                   |     4 +-
 plugin-management.html                          |     4 +-
 plugins.html                                    |     4 +-
 poweredbyhbase.html                             |     4 +-
 project-info.html                               |     4 +-
 project-reports.html                            |     4 +-
 project-summary.html                            |     4 +-
 pseudo-distributed.html                         |     4 +-
 replication.html                                |     4 +-
 resources.html                                  |     4 +-
 source-repository.html                          |     4 +-
 sponsors.html                                   |     4 +-
 supportingprojects.html                         |     4 +-
 team-list.html                                  |     4 +-
 .../hadoop/hbase/TestClusterPortAssignment.html |    14 +-
 .../hbase/client/TestFlushFromClient.html       |    48 +-
 .../client/TestSeparateClientZKCluster.html     |    44 +-
 ...estFailedProcCleanup.CreateFailObserver.html |     8 +-
 ...edProcCleanup.CreateFailObserverHandler.html |     8 +-
 .../hbase/procedure/TestFailedProcCleanup.html  |    28 +-
 .../hadoop/hbase/wal/TestDisabledWAL.html       |    30 +-
 .../hadoop/hbase/TestClusterPortAssignment.html |   113 +-
 .../hbase/client/TestFlushFromClient.html       |   309 +-
 .../client/TestSeparateClientZKCluster.html     |   503 +-
 ...estFailedProcCleanup.CreateFailObserver.html |   261 +-
 ...edProcCleanup.CreateFailObserverHandler.html |   261 +-
 .../hbase/procedure/TestFailedProcCleanup.html  |   261 +-
 .../regionserver/TestHdfsSnapshotHRegion.html   |     2 +-
 .../compaction/TestMajorCompactionRequest.html  |     2 +-
 .../hadoop/hbase/wal/TestDisabledWAL.html       |   173 +-
 144 files changed, 26242 insertions(+), 23692 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/acid-semantics.html
----------------------------------------------------------------------
diff --git a/acid-semantics.html b/acid-semantics.html
index dd6f320..c91d398 100644
--- a/acid-semantics.html
+++ b/acid-semantics.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180419" />
+    <meta name="Date-Revision-yyyymmdd" content="20180420" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013;  
       Apache HBase (TM) ACID Properties
@@ -601,7 +601,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-04-19</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-04-20</li>
             </p>
                 </div>
 


[11/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ReplayBatchOperation.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ReplayBatchOperation.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ReplayBatchOperation.html
index 7137829..4a879bb 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ReplayBatchOperation.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ReplayBatchOperation.html
@@ -78,124 +78,124 @@
 <span class="sourceLineNo">070</span>import java.util.concurrent.locks.ReadWriteLock;<a name="line.70"></a>
 <span class="sourceLineNo">071</span>import java.util.concurrent.locks.ReentrantReadWriteLock;<a name="line.71"></a>
 <span class="sourceLineNo">072</span>import java.util.function.Function;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.commons.collections.CollectionUtils;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.conf.Configuration;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.fs.FileStatus;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.fs.FileSystem;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.fs.LocatedFileStatus;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.fs.Path;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.Cell;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.CellBuilderType;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.CellComparatorImpl;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.CompoundConfiguration;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.HConstants;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.HConstants.OperationStatusCode;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.92"></a>
-<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.93"></a>
-<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.NamespaceDescriptor;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.97"></a>
-<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.98"></a>
-<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.TableName;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.Tag;<a name="line.100"></a>
-<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.TagUtil;<a name="line.101"></a>
-<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.102"></a>
-<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.client.Append;<a name="line.103"></a>
-<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;<a name="line.104"></a>
-<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.client.CompactionState;<a name="line.105"></a>
-<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.106"></a>
-<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.107"></a>
-<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.client.Get;<a name="line.108"></a>
-<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.109"></a>
-<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.client.IsolationLevel;<a name="line.110"></a>
-<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.111"></a>
-<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;<a name="line.112"></a>
-<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.client.Put;<a name="line.113"></a>
-<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.client.RegionInfoBuilder;<a name="line.115"></a>
-<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.client.Result;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.118"></a>
-<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.119"></a>
-<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.120"></a>
-<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.client.TableDescriptorBuilder;<a name="line.121"></a>
-<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.conf.ConfigurationManager;<a name="line.122"></a>
-<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;<a name="line.123"></a>
-<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;<a name="line.124"></a>
-<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;<a name="line.125"></a>
-<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.126"></a>
-<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.exceptions.TimeoutIOException;<a name="line.127"></a>
-<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.128"></a>
-<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.129"></a>
-<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.filter.FilterWrapper;<a name="line.130"></a>
-<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.filter.IncompatibleFilterException;<a name="line.131"></a>
-<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.132"></a>
-<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.133"></a>
-<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.io.hfile.HFile;<a name="line.135"></a>
-<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.ipc.RpcCall;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.hadoop.hbase.regionserver.throttle.StoreHotnessProtector;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.apache.hadoop.hbase.regionserver.wal.WALUtil;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>import org.apache.hadoop.hbase.security.User;<a name="line.153"></a>
-<span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.154"></a>
-<span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>import org.apache.hadoop.hbase.trace.TraceUtil;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.158"></a>
-<span class="sourceLineNo">159</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>import org.apache.hadoop.hbase.util.CompressionTest;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>import org.apache.hadoop.hbase.util.EncryptionTest;<a name="line.161"></a>
-<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.162"></a>
-<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.util.HashedBytes;<a name="line.164"></a>
-<span class="sourceLineNo">165</span>import org.apache.hadoop.hbase.util.NonceKey;<a name="line.165"></a>
-<span class="sourceLineNo">166</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.166"></a>
-<span class="sourceLineNo">167</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.167"></a>
-<span class="sourceLineNo">168</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.168"></a>
-<span class="sourceLineNo">169</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.169"></a>
-<span class="sourceLineNo">170</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.170"></a>
-<span class="sourceLineNo">171</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.171"></a>
-<span class="sourceLineNo">172</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.172"></a>
-<span class="sourceLineNo">173</span>import org.apache.hadoop.hbase.wal.WALKeyImpl;<a name="line.173"></a>
-<span class="sourceLineNo">174</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.174"></a>
-<span class="sourceLineNo">175</span>import org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;<a name="line.175"></a>
-<span class="sourceLineNo">176</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.176"></a>
-<span class="sourceLineNo">177</span>import org.apache.hadoop.util.StringUtils;<a name="line.177"></a>
-<span class="sourceLineNo">178</span>import org.apache.htrace.core.TraceScope;<a name="line.178"></a>
-<span class="sourceLineNo">179</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.179"></a>
-<span class="sourceLineNo">180</span>import org.slf4j.Logger;<a name="line.180"></a>
-<span class="sourceLineNo">181</span>import org.slf4j.LoggerFactory;<a name="line.181"></a>
-<span class="sourceLineNo">182</span><a name="line.182"></a>
-<span class="sourceLineNo">183</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.183"></a>
-<span class="sourceLineNo">184</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.184"></a>
-<span class="sourceLineNo">185</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.185"></a>
-<span class="sourceLineNo">186</span>import org.apache.hbase.thirdparty.com.google.common.collect.Maps;<a name="line.186"></a>
-<span class="sourceLineNo">187</span>import org.apache.hbase.thirdparty.com.google.common.io.Closeables;<a name="line.187"></a>
-<span class="sourceLineNo">188</span>import org.apache.hbase.thirdparty.com.google.protobuf.Service;<a name="line.188"></a>
-<span class="sourceLineNo">189</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.189"></a>
-<span class="sourceLineNo">190</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.190"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.conf.Configuration;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.fs.FileStatus;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.fs.FileSystem;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.fs.LocatedFileStatus;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.fs.Path;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.Cell;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.CellBuilderType;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.CellComparatorImpl;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.CompoundConfiguration;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.HConstants;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.HConstants.OperationStatusCode;<a name="line.90"></a>
+<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.NamespaceDescriptor;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.97"></a>
+<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.TableName;<a name="line.98"></a>
+<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.Tag;<a name="line.99"></a>
+<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.TagUtil;<a name="line.100"></a>
+<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.101"></a>
+<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.client.Append;<a name="line.102"></a>
+<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;<a name="line.103"></a>
+<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.client.CompactionState;<a name="line.104"></a>
+<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.105"></a>
+<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.106"></a>
+<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.client.Get;<a name="line.107"></a>
+<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.108"></a>
+<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.client.IsolationLevel;<a name="line.109"></a>
+<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.110"></a>
+<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;<a name="line.111"></a>
+<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.client.Put;<a name="line.112"></a>
+<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.113"></a>
+<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.client.RegionInfoBuilder;<a name="line.114"></a>
+<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.115"></a>
+<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.client.Result;<a name="line.116"></a>
+<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.117"></a>
+<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.118"></a>
+<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.119"></a>
+<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.client.TableDescriptorBuilder;<a name="line.120"></a>
+<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.conf.ConfigurationManager;<a name="line.121"></a>
+<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;<a name="line.122"></a>
+<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;<a name="line.124"></a>
+<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.exceptions.TimeoutIOException;<a name="line.126"></a>
+<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.127"></a>
+<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.128"></a>
+<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.filter.FilterWrapper;<a name="line.129"></a>
+<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.filter.IncompatibleFilterException;<a name="line.130"></a>
+<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.131"></a>
+<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.132"></a>
+<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.133"></a>
+<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.io.hfile.HFile;<a name="line.134"></a>
+<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;<a name="line.135"></a>
+<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;<a name="line.136"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.ipc.RpcCall;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.apache.hadoop.hbase.regionserver.throttle.StoreHotnessProtector;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>import org.apache.hadoop.hbase.regionserver.wal.WALUtil;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hadoop.hbase.security.User;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.trace.TraceUtil;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.156"></a>
+<span class="sourceLineNo">157</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.157"></a>
+<span class="sourceLineNo">158</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>import org.apache.hadoop.hbase.util.CompressionTest;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>import org.apache.hadoop.hbase.util.EncryptionTest;<a name="line.160"></a>
+<span class="sourceLineNo">161</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.161"></a>
+<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.162"></a>
+<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.util.HashedBytes;<a name="line.163"></a>
+<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.util.NonceKey;<a name="line.164"></a>
+<span class="sourceLineNo">165</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.165"></a>
+<span class="sourceLineNo">166</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.166"></a>
+<span class="sourceLineNo">167</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.167"></a>
+<span class="sourceLineNo">168</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.168"></a>
+<span class="sourceLineNo">169</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.169"></a>
+<span class="sourceLineNo">170</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.170"></a>
+<span class="sourceLineNo">171</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.171"></a>
+<span class="sourceLineNo">172</span>import org.apache.hadoop.hbase.wal.WALKeyImpl;<a name="line.172"></a>
+<span class="sourceLineNo">173</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.173"></a>
+<span class="sourceLineNo">174</span>import org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;<a name="line.174"></a>
+<span class="sourceLineNo">175</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.175"></a>
+<span class="sourceLineNo">176</span>import org.apache.hadoop.util.StringUtils;<a name="line.176"></a>
+<span class="sourceLineNo">177</span>import org.apache.htrace.core.TraceScope;<a name="line.177"></a>
+<span class="sourceLineNo">178</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.178"></a>
+<span class="sourceLineNo">179</span>import org.slf4j.Logger;<a name="line.179"></a>
+<span class="sourceLineNo">180</span>import org.slf4j.LoggerFactory;<a name="line.180"></a>
+<span class="sourceLineNo">181</span><a name="line.181"></a>
+<span class="sourceLineNo">182</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.182"></a>
+<span class="sourceLineNo">183</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.183"></a>
+<span class="sourceLineNo">184</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.184"></a>
+<span class="sourceLineNo">185</span>import org.apache.hbase.thirdparty.com.google.common.collect.Maps;<a name="line.185"></a>
+<span class="sourceLineNo">186</span>import org.apache.hbase.thirdparty.com.google.common.io.Closeables;<a name="line.186"></a>
+<span class="sourceLineNo">187</span>import org.apache.hbase.thirdparty.com.google.protobuf.Service;<a name="line.187"></a>
+<span class="sourceLineNo">188</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.188"></a>
+<span class="sourceLineNo">189</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.189"></a>
+<span class="sourceLineNo">190</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.190"></a>
 <span class="sourceLineNo">191</span><a name="line.191"></a>
 <span class="sourceLineNo">192</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.192"></a>
 <span class="sourceLineNo">193</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;<a name="line.193"></a>
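
The hunk above (repeated verbatim below for the other HRegion inner-class pages, since each javadoc page re-renders the same HRegion.java source) carries one content change in the import block besides the line renumbering: the unshaded org.apache.commons.collections.CollectionUtils import is dropped, and the relocated commons-collections4 class, org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils, is imported instead at the end of the thirdparty block. A minimal sketch of what that migration looks like at a call site, assuming the hbase-thirdparty jar is on the classpath; the demo class and the list it checks are illustrative assumptions, not taken from the diff:

  // Before (commons-collections 3, unshaded):
  //   import org.apache.commons.collections.CollectionUtils;
  // After (commons-collections4, relocated under org.apache.hbase.thirdparty):
  import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;

  import java.util.Collections;
  import java.util.List;

  // Hypothetical demo class; it only illustrates the import swap shown in the hunk.
  public class CollectionUtilsMigrationSketch {
    public static void main(String[] args) {
      List<String> mutations = Collections.emptyList();
      // isEmpty(Collection) takes the same signature in collections 3 and 4,
      // so call sites compile unchanged; only the import line moves.
      if (CollectionUtils.isEmpty(mutations)) {
        System.out.println("no mutations to replay");
      }
    }
  }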

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockContext.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockContext.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockContext.html
index 7137829..4a879bb 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockContext.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockContext.html
@@ -78,124 +78,124 @@
 <span class="sourceLineNo">070</span>import java.util.concurrent.locks.ReadWriteLock;<a name="line.70"></a>
 <span class="sourceLineNo">071</span>import java.util.concurrent.locks.ReentrantReadWriteLock;<a name="line.71"></a>
 <span class="sourceLineNo">072</span>import java.util.function.Function;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.commons.collections.CollectionUtils;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.conf.Configuration;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.fs.FileStatus;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.fs.FileSystem;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.fs.LocatedFileStatus;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.fs.Path;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.Cell;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.CellBuilderType;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.CellComparatorImpl;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.CompoundConfiguration;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.HConstants;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.HConstants.OperationStatusCode;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.92"></a>
-<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.93"></a>
-<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.NamespaceDescriptor;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.97"></a>
-<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.98"></a>
-<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.TableName;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.Tag;<a name="line.100"></a>
-<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.TagUtil;<a name="line.101"></a>
-<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.102"></a>
-<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.client.Append;<a name="line.103"></a>
-<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;<a name="line.104"></a>
-<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.client.CompactionState;<a name="line.105"></a>
-<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.106"></a>
-<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.107"></a>
-<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.client.Get;<a name="line.108"></a>
-<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.109"></a>
-<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.client.IsolationLevel;<a name="line.110"></a>
-<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.111"></a>
-<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;<a name="line.112"></a>
-<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.client.Put;<a name="line.113"></a>
-<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.client.RegionInfoBuilder;<a name="line.115"></a>
-<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.client.Result;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.118"></a>
-<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.119"></a>
-<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.120"></a>
-<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.client.TableDescriptorBuilder;<a name="line.121"></a>
-<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.conf.ConfigurationManager;<a name="line.122"></a>
-<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;<a name="line.123"></a>
-<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;<a name="line.124"></a>
-<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;<a name="line.125"></a>
-<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.126"></a>
-<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.exceptions.TimeoutIOException;<a name="line.127"></a>
-<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.128"></a>
-<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.129"></a>
-<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.filter.FilterWrapper;<a name="line.130"></a>
-<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.filter.IncompatibleFilterException;<a name="line.131"></a>
-<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.132"></a>
-<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.133"></a>
-<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.io.hfile.HFile;<a name="line.135"></a>
-<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.ipc.RpcCall;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.hadoop.hbase.regionserver.throttle.StoreHotnessProtector;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.apache.hadoop.hbase.regionserver.wal.WALUtil;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>import org.apache.hadoop.hbase.security.User;<a name="line.153"></a>
-<span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.154"></a>
-<span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>import org.apache.hadoop.hbase.trace.TraceUtil;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.158"></a>
-<span class="sourceLineNo">159</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>import org.apache.hadoop.hbase.util.CompressionTest;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>import org.apache.hadoop.hbase.util.EncryptionTest;<a name="line.161"></a>
-<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.162"></a>
-<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.util.HashedBytes;<a name="line.164"></a>
-<span class="sourceLineNo">165</span>import org.apache.hadoop.hbase.util.NonceKey;<a name="line.165"></a>
-<span class="sourceLineNo">166</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.166"></a>
-<span class="sourceLineNo">167</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.167"></a>
-<span class="sourceLineNo">168</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.168"></a>
-<span class="sourceLineNo">169</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.169"></a>
-<span class="sourceLineNo">170</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.170"></a>
-<span class="sourceLineNo">171</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.171"></a>
-<span class="sourceLineNo">172</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.172"></a>
-<span class="sourceLineNo">173</span>import org.apache.hadoop.hbase.wal.WALKeyImpl;<a name="line.173"></a>
-<span class="sourceLineNo">174</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.174"></a>
-<span class="sourceLineNo">175</span>import org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;<a name="line.175"></a>
-<span class="sourceLineNo">176</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.176"></a>
-<span class="sourceLineNo">177</span>import org.apache.hadoop.util.StringUtils;<a name="line.177"></a>
-<span class="sourceLineNo">178</span>import org.apache.htrace.core.TraceScope;<a name="line.178"></a>
-<span class="sourceLineNo">179</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.179"></a>
-<span class="sourceLineNo">180</span>import org.slf4j.Logger;<a name="line.180"></a>
-<span class="sourceLineNo">181</span>import org.slf4j.LoggerFactory;<a name="line.181"></a>
-<span class="sourceLineNo">182</span><a name="line.182"></a>
-<span class="sourceLineNo">183</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.183"></a>
-<span class="sourceLineNo">184</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.184"></a>
-<span class="sourceLineNo">185</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.185"></a>
-<span class="sourceLineNo">186</span>import org.apache.hbase.thirdparty.com.google.common.collect.Maps;<a name="line.186"></a>
-<span class="sourceLineNo">187</span>import org.apache.hbase.thirdparty.com.google.common.io.Closeables;<a name="line.187"></a>
-<span class="sourceLineNo">188</span>import org.apache.hbase.thirdparty.com.google.protobuf.Service;<a name="line.188"></a>
-<span class="sourceLineNo">189</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.189"></a>
-<span class="sourceLineNo">190</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.190"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.conf.Configuration;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.fs.FileStatus;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.fs.FileSystem;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.fs.LocatedFileStatus;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.fs.Path;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.Cell;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.CellBuilderType;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.CellComparatorImpl;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.CompoundConfiguration;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.HConstants;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.HConstants.OperationStatusCode;<a name="line.90"></a>
+<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.NamespaceDescriptor;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.97"></a>
+<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.TableName;<a name="line.98"></a>
+<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.Tag;<a name="line.99"></a>
+<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.TagUtil;<a name="line.100"></a>
+<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.101"></a>
+<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.client.Append;<a name="line.102"></a>
+<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;<a name="line.103"></a>
+<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.client.CompactionState;<a name="line.104"></a>
+<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.105"></a>
+<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.106"></a>
+<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.client.Get;<a name="line.107"></a>
+<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.108"></a>
+<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.client.IsolationLevel;<a name="line.109"></a>
+<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.110"></a>
+<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;<a name="line.111"></a>
+<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.client.Put;<a name="line.112"></a>
+<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.113"></a>
+<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.client.RegionInfoBuilder;<a name="line.114"></a>
+<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.115"></a>
+<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.client.Result;<a name="line.116"></a>
+<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.117"></a>
+<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.118"></a>
+<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.119"></a>
+<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.client.TableDescriptorBuilder;<a name="line.120"></a>
+<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.conf.ConfigurationManager;<a name="line.121"></a>
+<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;<a name="line.122"></a>
+<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;<a name="line.124"></a>
+<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.exceptions.TimeoutIOException;<a name="line.126"></a>
+<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.127"></a>
+<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.128"></a>
+<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.filter.FilterWrapper;<a name="line.129"></a>
+<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.filter.IncompatibleFilterException;<a name="line.130"></a>
+<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.131"></a>
+<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.132"></a>
+<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.133"></a>
+<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.io.hfile.HFile;<a name="line.134"></a>
+<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;<a name="line.135"></a>
+<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;<a name="line.136"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.ipc.RpcCall;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.apache.hadoop.hbase.regionserver.throttle.StoreHotnessProtector;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>import org.apache.hadoop.hbase.regionserver.wal.WALUtil;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hadoop.hbase.security.User;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.trace.TraceUtil;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.156"></a>
+<span class="sourceLineNo">157</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.157"></a>
+<span class="sourceLineNo">158</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>import org.apache.hadoop.hbase.util.CompressionTest;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>import org.apache.hadoop.hbase.util.EncryptionTest;<a name="line.160"></a>
+<span class="sourceLineNo">161</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.161"></a>
+<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.162"></a>
+<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.util.HashedBytes;<a name="line.163"></a>
+<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.util.NonceKey;<a name="line.164"></a>
+<span class="sourceLineNo">165</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.165"></a>
+<span class="sourceLineNo">166</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.166"></a>
+<span class="sourceLineNo">167</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.167"></a>
+<span class="sourceLineNo">168</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.168"></a>
+<span class="sourceLineNo">169</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.169"></a>
+<span class="sourceLineNo">170</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.170"></a>
+<span class="sourceLineNo">171</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.171"></a>
+<span class="sourceLineNo">172</span>import org.apache.hadoop.hbase.wal.WALKeyImpl;<a name="line.172"></a>
+<span class="sourceLineNo">173</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.173"></a>
+<span class="sourceLineNo">174</span>import org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;<a name="line.174"></a>
+<span class="sourceLineNo">175</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.175"></a>
+<span class="sourceLineNo">176</span>import org.apache.hadoop.util.StringUtils;<a name="line.176"></a>
+<span class="sourceLineNo">177</span>import org.apache.htrace.core.TraceScope;<a name="line.177"></a>
+<span class="sourceLineNo">178</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.178"></a>
+<span class="sourceLineNo">179</span>import org.slf4j.Logger;<a name="line.179"></a>
+<span class="sourceLineNo">180</span>import org.slf4j.LoggerFactory;<a name="line.180"></a>
+<span class="sourceLineNo">181</span><a name="line.181"></a>
+<span class="sourceLineNo">182</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.182"></a>
+<span class="sourceLineNo">183</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.183"></a>
+<span class="sourceLineNo">184</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.184"></a>
+<span class="sourceLineNo">185</span>import org.apache.hbase.thirdparty.com.google.common.collect.Maps;<a name="line.185"></a>
+<span class="sourceLineNo">186</span>import org.apache.hbase.thirdparty.com.google.common.io.Closeables;<a name="line.186"></a>
+<span class="sourceLineNo">187</span>import org.apache.hbase.thirdparty.com.google.protobuf.Service;<a name="line.187"></a>
+<span class="sourceLineNo">188</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.188"></a>
+<span class="sourceLineNo">189</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.189"></a>
+<span class="sourceLineNo">190</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.190"></a>
 <span class="sourceLineNo">191</span><a name="line.191"></a>
 <span class="sourceLineNo">192</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.192"></a>
 <span class="sourceLineNo">193</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;<a name="line.193"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockImpl.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockImpl.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockImpl.html
index 7137829..4a879bb 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockImpl.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.RowLockImpl.html
@@ -78,124 +78,124 @@
 <span class="sourceLineNo">070</span>import java.util.concurrent.locks.ReadWriteLock;<a name="line.70"></a>
 <span class="sourceLineNo">071</span>import java.util.concurrent.locks.ReentrantReadWriteLock;<a name="line.71"></a>
 <span class="sourceLineNo">072</span>import java.util.function.Function;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.commons.collections.CollectionUtils;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.conf.Configuration;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.fs.FileStatus;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.fs.FileSystem;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.fs.LocatedFileStatus;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.fs.Path;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.Cell;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.CellBuilderType;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.CellComparatorImpl;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.CompoundConfiguration;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.HConstants;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.HConstants.OperationStatusCode;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.92"></a>
-<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.93"></a>
-<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.NamespaceDescriptor;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.97"></a>
-<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.98"></a>
-<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.TableName;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.Tag;<a name="line.100"></a>
-<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.TagUtil;<a name="line.101"></a>
-<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.102"></a>
-<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.client.Append;<a name="line.103"></a>
-<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;<a name="line.104"></a>
-<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.client.CompactionState;<a name="line.105"></a>
-<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.106"></a>
-<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.107"></a>
-<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.client.Get;<a name="line.108"></a>
-<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.109"></a>
-<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.client.IsolationLevel;<a name="line.110"></a>
-<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.111"></a>
-<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;<a name="line.112"></a>
-<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.client.Put;<a name="line.113"></a>
-<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.client.RegionInfoBuilder;<a name="line.115"></a>
-<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.client.Result;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.118"></a>
-<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.119"></a>
-<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.120"></a>
-<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.client.TableDescriptorBuilder;<a name="line.121"></a>
-<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.conf.ConfigurationManager;<a name="line.122"></a>
-<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;<a name="line.123"></a>
-<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;<a name="line.124"></a>
-<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;<a name="line.125"></a>
-<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.126"></a>
-<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.exceptions.TimeoutIOException;<a name="line.127"></a>
-<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.128"></a>
-<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.129"></a>
-<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.filter.FilterWrapper;<a name="line.130"></a>
-<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.filter.IncompatibleFilterException;<a name="line.131"></a>
-<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.132"></a>
-<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.133"></a>
-<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.io.hfile.HFile;<a name="line.135"></a>
-<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.ipc.RpcCall;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.hadoop.hbase.regionserver.throttle.StoreHotnessProtector;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.apache.hadoop.hbase.regionserver.wal.WALUtil;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>import org.apache.hadoop.hbase.security.User;<a name="line.153"></a>
-<span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.154"></a>
-<span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>import org.apache.hadoop.hbase.trace.TraceUtil;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.158"></a>
-<span class="sourceLineNo">159</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>import org.apache.hadoop.hbase.util.CompressionTest;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>import org.apache.hadoop.hbase.util.EncryptionTest;<a name="line.161"></a>
-<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.162"></a>
-<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.util.HashedBytes;<a name="line.164"></a>
-<span class="sourceLineNo">165</span>import org.apache.hadoop.hbase.util.NonceKey;<a name="line.165"></a>
-<span class="sourceLineNo">166</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.166"></a>
-<span class="sourceLineNo">167</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.167"></a>
-<span class="sourceLineNo">168</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.168"></a>
-<span class="sourceLineNo">169</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.169"></a>
-<span class="sourceLineNo">170</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.170"></a>
-<span class="sourceLineNo">171</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.171"></a>
-<span class="sourceLineNo">172</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.172"></a>
-<span class="sourceLineNo">173</span>import org.apache.hadoop.hbase.wal.WALKeyImpl;<a name="line.173"></a>
-<span class="sourceLineNo">174</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.174"></a>
-<span class="sourceLineNo">175</span>import org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;<a name="line.175"></a>
-<span class="sourceLineNo">176</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.176"></a>
-<span class="sourceLineNo">177</span>import org.apache.hadoop.util.StringUtils;<a name="line.177"></a>
-<span class="sourceLineNo">178</span>import org.apache.htrace.core.TraceScope;<a name="line.178"></a>
-<span class="sourceLineNo">179</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.179"></a>
-<span class="sourceLineNo">180</span>import org.slf4j.Logger;<a name="line.180"></a>
-<span class="sourceLineNo">181</span>import org.slf4j.LoggerFactory;<a name="line.181"></a>
-<span class="sourceLineNo">182</span><a name="line.182"></a>
-<span class="sourceLineNo">183</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.183"></a>
-<span class="sourceLineNo">184</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.184"></a>
-<span class="sourceLineNo">185</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.185"></a>
-<span class="sourceLineNo">186</span>import org.apache.hbase.thirdparty.com.google.common.collect.Maps;<a name="line.186"></a>
-<span class="sourceLineNo">187</span>import org.apache.hbase.thirdparty.com.google.common.io.Closeables;<a name="line.187"></a>
-<span class="sourceLineNo">188</span>import org.apache.hbase.thirdparty.com.google.protobuf.Service;<a name="line.188"></a>
-<span class="sourceLineNo">189</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.189"></a>
-<span class="sourceLineNo">190</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.190"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.conf.Configuration;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.fs.FileStatus;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.fs.FileSystem;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.fs.LocatedFileStatus;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.fs.Path;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.Cell;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.CellBuilderType;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.CellComparatorImpl;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.CompoundConfiguration;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.HConstants;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.HConstants.OperationStatusCode;<a name="line.90"></a>
+<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.NamespaceDescriptor;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.97"></a>
+<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.TableName;<a name="line.98"></a>
+<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.Tag;<a name="line.99"></a>
+<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.TagUtil;<a name="line.100"></a>
+<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.101"></a>
+<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.client.Append;<a name="line.102"></a>
+<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;<a name="line.103"></a>
+<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.client.CompactionState;<a name="line.104"></a>
+<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.105"></a>
+<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.106"></a>
+<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.client.Get;<a name="line.107"></a>
+<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.108"></a>
+<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.client.IsolationLevel;<a name="line.109"></a>
+<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.110"></a>
+<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;<a name="line.111"></a>
+<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.client.Put;<a name="line.112"></a>
+<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.113"></a>
+<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.client.RegionInfoBuilder;<a name="line.114"></a>
+<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.115"></a>
+<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.client.Result;<a name="line.116"></a>
+<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.117"></a>
+<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.118"></a>
+<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.119"></a>
+<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.client.TableDescriptorBuilder;<a name="line.120"></a>
+<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.conf.ConfigurationManager;<a name="line.121"></a>
+<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;<a name="line.122"></a>
+<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;<a name="line.124"></a>
+<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.exceptions.TimeoutIOException;<a name="line.126"></a>
+<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.127"></a>
+<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.128"></a>
+<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.filter.FilterWrapper;<a name="line.129"></a>
+<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.filter.IncompatibleFilterException;<a name="line.130"></a>
+<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.131"></a>
+<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.132"></a>
+<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.133"></a>
+<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.io.hfile.HFile;<a name="line.134"></a>
+<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;<a name="line.135"></a>
+<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;<a name="line.136"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.ipc.RpcCall;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.apache.hadoop.hbase.regionserver.throttle.StoreHotnessProtector;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>import org.apache.hadoop.hbase.regionserver.wal.WALUtil;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hadoop.hbase.security.User;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.trace.TraceUtil;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.156"></a>
+<span class="sourceLineNo">157</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.157"></a>
+<span class="sourceLineNo">158</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>import org.apache.hadoop.hbase.util.CompressionTest;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>import org.apache.hadoop.hbase.util.EncryptionTest;<a name="line.160"></a>
+<span class="sourceLineNo">161</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.161"></a>
+<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.162"></a>
+<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.util.HashedBytes;<a name="line.163"></a>
+<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.util.NonceKey;<a name="line.164"></a>
+<span class="sourceLineNo">165</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.165"></a>
+<span class="sourceLineNo">166</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.166"></a>
+<span class="sourceLineNo">167</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.167"></a>
+<span class="sourceLineNo">168</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.168"></a>
+<span class="sourceLineNo">169</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.169"></a>
+<span class="sourceLineNo">170</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.170"></a>
+<span class="sourceLineNo">171</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.171"></a>
+<span class="sourceLineNo">172</span>import org.apache.hadoop.hbase.wal.WALKeyImpl;<a name="line.172"></a>
+<span class="sourceLineNo">173</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.173"></a>
+<span class="sourceLineNo">174</span>import org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;<a name="line.174"></a>
+<span class="sourceLineNo">175</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.175"></a>
+<span class="sourceLineNo">176</span>import org.apache.hadoop.util.StringUtils;<a name="line.176"></a>
+<span class="sourceLineNo">177</span>import org.apache.htrace.core.TraceScope;<a name="line.177"></a>
+<span class="sourceLineNo">178</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.178"></a>
+<span class="sourceLineNo">179</span>import org.slf4j.Logger;<a name="line.179"></a>
+<span class="sourceLineNo">180</span>import org.slf4j.LoggerFactory;<a name="line.180"></a>
+<span class="sourceLineNo">181</span><a name="line.181"></a>
+<span class="sourceLineNo">182</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.182"></a>
+<span class="sourceLineNo">183</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.183"></a>
+<span class="sourceLineNo">184</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.184"></a>
+<span class="sourceLineNo">185</span>import org.apache.hbase.thirdparty.com.google.common.collect.Maps;<a name="line.185"></a>
+<span class="sourceLineNo">186</span>import org.apache.hbase.thirdparty.com.google.common.io.Closeables;<a name="line.186"></a>
+<span class="sourceLineNo">187</span>import org.apache.hbase.thirdparty.com.google.protobuf.Service;<a name="line.187"></a>
+<span class="sourceLineNo">188</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.188"></a>
+<span class="sourceLineNo">189</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.189"></a>
+<span class="sourceLineNo">190</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.190"></a>
 <span class="sourceLineNo">191</span><a name="line.191"></a>
 <span class="sourceLineNo">192</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.192"></a>
 <span class="sourceLineNo">193</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;<a name="line.193"></a>


[17/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.StoreFileReference.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.StoreFileReference.html b/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.StoreFileReference.html
index e080cd6..73a1036 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.StoreFileReference.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.StoreFileReference.html
@@ -42,604 +42,605 @@
 <span class="sourceLineNo">034</span>import java.util.function.Predicate;<a name="line.34"></a>
 <span class="sourceLineNo">035</span>import java.util.stream.Collectors;<a name="line.35"></a>
 <span class="sourceLineNo">036</span><a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.commons.lang.builder.HashCodeBuilder;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.commons.logging.Log;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.commons.logging.LogFactory;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.hadoop.conf.Configuration;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.hadoop.fs.FileStatus;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.fs.FileSystem;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.fs.Path;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.TableName;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.client.Connection;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.client.Get;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.client.Put;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.client.Result;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.client.Table;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.hbase.util.HFileArchiveUtil;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.util.StringUtils;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.55"></a>
-<span class="sourceLineNo">056</span><a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hbase.thirdparty.com.google.common.collect.HashMultimap;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;<a name="line.59"></a>
-<span class="sourceLineNo">060</span><a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile;<a name="line.65"></a>
-<span class="sourceLineNo">066</span><a name="line.66"></a>
-<span class="sourceLineNo">067</span>/**<a name="line.67"></a>
-<span class="sourceLineNo">068</span> * Tracks file archiving and updates the hbase quota table.<a name="line.68"></a>
-<span class="sourceLineNo">069</span> */<a name="line.69"></a>
-<span class="sourceLineNo">070</span>@InterfaceAudience.Private<a name="line.70"></a>
-<span class="sourceLineNo">071</span>public class FileArchiverNotifierImpl implements FileArchiverNotifier {<a name="line.71"></a>
-<span class="sourceLineNo">072</span>  private static final Log LOG = LogFactory.getLog(FileArchiverNotifierImpl.class);<a name="line.72"></a>
-<span class="sourceLineNo">073</span>  private final Connection conn;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>  private final Configuration conf;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>  private final FileSystem fs;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>  private final TableName tn;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>  private final ReadLock readLock;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>  private final WriteLock writeLock;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>  private volatile long lastFullCompute = Long.MIN_VALUE;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>  private List&lt;String&gt; currentSnapshots = Collections.emptyList();<a name="line.80"></a>
-<span class="sourceLineNo">081</span>  private static final Map&lt;String,Object&gt; NAMESPACE_LOCKS = new HashMap&lt;&gt;();<a name="line.81"></a>
-<span class="sourceLineNo">082</span><a name="line.82"></a>
-<span class="sourceLineNo">083</span>  /**<a name="line.83"></a>
-<span class="sourceLineNo">084</span>   * An Exception thrown when SnapshotSize updates to hbase:quota fail to be written.<a name="line.84"></a>
-<span class="sourceLineNo">085</span>   */<a name="line.85"></a>
-<span class="sourceLineNo">086</span>  @InterfaceAudience.Private<a name="line.86"></a>
-<span class="sourceLineNo">087</span>  public static class QuotaSnapshotSizeSerializationException extends IOException {<a name="line.87"></a>
-<span class="sourceLineNo">088</span>    private static final long serialVersionUID = 1L;<a name="line.88"></a>
-<span class="sourceLineNo">089</span><a name="line.89"></a>
-<span class="sourceLineNo">090</span>    public QuotaSnapshotSizeSerializationException(String msg) {<a name="line.90"></a>
-<span class="sourceLineNo">091</span>      super(msg);<a name="line.91"></a>
-<span class="sourceLineNo">092</span>    }<a name="line.92"></a>
-<span class="sourceLineNo">093</span>  }<a name="line.93"></a>
-<span class="sourceLineNo">094</span><a name="line.94"></a>
-<span class="sourceLineNo">095</span>  public FileArchiverNotifierImpl(<a name="line.95"></a>
-<span class="sourceLineNo">096</span>      Connection conn, Configuration conf, FileSystem fs, TableName tn) {<a name="line.96"></a>
-<span class="sourceLineNo">097</span>    this.conn = conn;<a name="line.97"></a>
-<span class="sourceLineNo">098</span>    this.conf = conf;<a name="line.98"></a>
-<span class="sourceLineNo">099</span>    this.fs = fs;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>    this.tn = tn;<a name="line.100"></a>
-<span class="sourceLineNo">101</span>    ReentrantReadWriteLock lock = new ReentrantReadWriteLock();<a name="line.101"></a>
-<span class="sourceLineNo">102</span>    readLock = lock.readLock();<a name="line.102"></a>
-<span class="sourceLineNo">103</span>    writeLock = lock.writeLock();<a name="line.103"></a>
-<span class="sourceLineNo">104</span>  }<a name="line.104"></a>
-<span class="sourceLineNo">105</span><a name="line.105"></a>
-<span class="sourceLineNo">106</span>  static synchronized Object getLockForNamespace(String namespace) {<a name="line.106"></a>
-<span class="sourceLineNo">107</span>    return NAMESPACE_LOCKS.computeIfAbsent(namespace, (ns) -&gt; new Object());<a name="line.107"></a>
-<span class="sourceLineNo">108</span>  }<a name="line.108"></a>
-<span class="sourceLineNo">109</span><a name="line.109"></a>
-<span class="sourceLineNo">110</span>  /**<a name="line.110"></a>
-<span class="sourceLineNo">111</span>   * Returns a strictly-increasing measure of time extracted by {@link System#nanoTime()}.<a name="line.111"></a>
-<span class="sourceLineNo">112</span>   */<a name="line.112"></a>
-<span class="sourceLineNo">113</span>  long getLastFullCompute() {<a name="line.113"></a>
-<span class="sourceLineNo">114</span>    return lastFullCompute;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>  }<a name="line.115"></a>
-<span class="sourceLineNo">116</span><a name="line.116"></a>
-<span class="sourceLineNo">117</span>  @Override<a name="line.117"></a>
-<span class="sourceLineNo">118</span>  public void addArchivedFiles(Set&lt;Entry&lt;String, Long&gt;&gt; fileSizes) throws IOException {<a name="line.118"></a>
-<span class="sourceLineNo">119</span>    long start = System.nanoTime();<a name="line.119"></a>
-<span class="sourceLineNo">120</span>    readLock.lock();<a name="line.120"></a>
-<span class="sourceLineNo">121</span>    try {<a name="line.121"></a>
-<span class="sourceLineNo">122</span>      // We want to catch the case where we got an archival request, but there was a full<a name="line.122"></a>
-<span class="sourceLineNo">123</span>      // re-computation in progress that was blocking us. Most likely, the full computation is going<a name="line.123"></a>
-<span class="sourceLineNo">124</span>      // to already include the changes we were going to make.<a name="line.124"></a>
-<span class="sourceLineNo">125</span>      //<a name="line.125"></a>
-<span class="sourceLineNo">126</span>      // Same as "start &lt; lastFullCompute" but avoiding numeric overflow per the<a name="line.126"></a>
-<span class="sourceLineNo">127</span>      // System.nanoTime() javadoc<a name="line.127"></a>
-<span class="sourceLineNo">128</span>      if (lastFullCompute != Long.MIN_VALUE &amp;&amp; start - lastFullCompute &lt; 0) {<a name="line.128"></a>
-<span class="sourceLineNo">129</span>        if (LOG.isTraceEnabled()) {<a name="line.129"></a>
-<span class="sourceLineNo">130</span>          LOG.trace("A full computation was performed after this request was received."<a name="line.130"></a>
-<span class="sourceLineNo">131</span>              + " Ignoring requested updates: " + fileSizes);<a name="line.131"></a>
-<span class="sourceLineNo">132</span>        }<a name="line.132"></a>
-<span class="sourceLineNo">133</span>        return;<a name="line.133"></a>
-<span class="sourceLineNo">134</span>      }<a name="line.134"></a>
-<span class="sourceLineNo">135</span><a name="line.135"></a>
-<span class="sourceLineNo">136</span>      if (LOG.isTraceEnabled()) {<a name="line.136"></a>
-<span class="sourceLineNo">137</span>        LOG.trace("currentSnapshots: " + currentSnapshots + " fileSize: "+ fileSizes);<a name="line.137"></a>
-<span class="sourceLineNo">138</span>      }<a name="line.138"></a>
-<span class="sourceLineNo">139</span><a name="line.139"></a>
-<span class="sourceLineNo">140</span>      // Write increment to quota table for the correct snapshot. Only do this if we have snapshots<a name="line.140"></a>
-<span class="sourceLineNo">141</span>      // and some files that were archived.<a name="line.141"></a>
-<span class="sourceLineNo">142</span>      if (!currentSnapshots.isEmpty() &amp;&amp; !fileSizes.isEmpty()) {<a name="line.142"></a>
-<span class="sourceLineNo">143</span>        // We get back the files which no snapshot referenced (the files which will be deleted soon)<a name="line.143"></a>
-<span class="sourceLineNo">144</span>        groupArchivedFiledBySnapshotAndRecordSize(currentSnapshots, fileSizes);<a name="line.144"></a>
-<span class="sourceLineNo">145</span>      }<a name="line.145"></a>
-<span class="sourceLineNo">146</span>    } finally {<a name="line.146"></a>
-<span class="sourceLineNo">147</span>      readLock.unlock();<a name="line.147"></a>
-<span class="sourceLineNo">148</span>    }<a name="line.148"></a>
-<span class="sourceLineNo">149</span>  }<a name="line.149"></a>
-<span class="sourceLineNo">150</span><a name="line.150"></a>
-<span class="sourceLineNo">151</span>  /**<a name="line.151"></a>
-<span class="sourceLineNo">152</span>   * For each file in the map, this updates the first snapshot (lexicographic snapshot name) that<a name="line.152"></a>
-<span class="sourceLineNo">153</span>   * references this file. The result of this computation is serialized to the quota table.<a name="line.153"></a>
-<span class="sourceLineNo">154</span>   *<a name="line.154"></a>
-<span class="sourceLineNo">155</span>   * @param snapshots A collection of HBase snapshots to group the files into<a name="line.155"></a>
-<span class="sourceLineNo">156</span>   * @param fileSizes A map of file names to their sizes<a name="line.156"></a>
-<span class="sourceLineNo">157</span>   */<a name="line.157"></a>
-<span class="sourceLineNo">158</span>  void groupArchivedFiledBySnapshotAndRecordSize(<a name="line.158"></a>
-<span class="sourceLineNo">159</span>      List&lt;String&gt; snapshots, Set&lt;Entry&lt;String, Long&gt;&gt; fileSizes) throws IOException {<a name="line.159"></a>
-<span class="sourceLineNo">160</span>    // Make a copy as we'll modify it.<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    final Map&lt;String,Long&gt; filesToUpdate = new HashMap&lt;&gt;(fileSizes.size());<a name="line.161"></a>
-<span class="sourceLineNo">162</span>    for (Entry&lt;String,Long&gt; entry : fileSizes) {<a name="line.162"></a>
-<span class="sourceLineNo">163</span>      filesToUpdate.put(entry.getKey(), entry.getValue());<a name="line.163"></a>
-<span class="sourceLineNo">164</span>    }<a name="line.164"></a>
-<span class="sourceLineNo">165</span>    // Track the change in size to each snapshot<a name="line.165"></a>
-<span class="sourceLineNo">166</span>    final Map&lt;String,Long&gt; snapshotSizeChanges = new HashMap&lt;&gt;();<a name="line.166"></a>
-<span class="sourceLineNo">167</span>    for (String snapshot : snapshots) {<a name="line.167"></a>
-<span class="sourceLineNo">168</span>      // For each file in `filesToUpdate`, check if `snapshot` refers to it.<a name="line.168"></a>
-<span class="sourceLineNo">169</span>      // If `snapshot` does, remove it from `filesToUpdate` and add it to `snapshotSizeChanges`.<a name="line.169"></a>
-<span class="sourceLineNo">170</span>      bucketFilesToSnapshot(snapshot, filesToUpdate, snapshotSizeChanges);<a name="line.170"></a>
-<span class="sourceLineNo">171</span>      if (filesToUpdate.isEmpty()) {<a name="line.171"></a>
-<span class="sourceLineNo">172</span>        // If we have no more files recently archived, we have nothing more to check<a name="line.172"></a>
-<span class="sourceLineNo">173</span>        break;<a name="line.173"></a>
-<span class="sourceLineNo">174</span>      }<a name="line.174"></a>
-<span class="sourceLineNo">175</span>    }<a name="line.175"></a>
-<span class="sourceLineNo">176</span>    // We have computed changes to the snapshot size, we need to record them.<a name="line.176"></a>
-<span class="sourceLineNo">177</span>    if (!snapshotSizeChanges.isEmpty()) {<a name="line.177"></a>
-<span class="sourceLineNo">178</span>      if (LOG.isTraceEnabled()) {<a name="line.178"></a>
-<span class="sourceLineNo">179</span>        LOG.trace("Writing snapshot size changes for: " + snapshotSizeChanges);<a name="line.179"></a>
-<span class="sourceLineNo">180</span>      }<a name="line.180"></a>
-<span class="sourceLineNo">181</span>      persistSnapshotSizeChanges(snapshotSizeChanges);<a name="line.181"></a>
-<span class="sourceLineNo">182</span>    }<a name="line.182"></a>
-<span class="sourceLineNo">183</span>  }<a name="line.183"></a>
-<span class="sourceLineNo">184</span><a name="line.184"></a>
-<span class="sourceLineNo">185</span>  /**<a name="line.185"></a>
-<span class="sourceLineNo">186</span>   * For the given snapshot, find all files which this {@code snapshotName} references. After a file<a name="line.186"></a>
-<span class="sourceLineNo">187</span>   * is found to be referenced by the snapshot, it is removed from {@code filesToUpdate} and<a name="line.187"></a>
-<span class="sourceLineNo">188</span>   * {@code snapshotSizeChanges} is updated in concert.<a name="line.188"></a>
-<span class="sourceLineNo">189</span>   *<a name="line.189"></a>
-<span class="sourceLineNo">190</span>   * @param snapshotName The snapshot to check<a name="line.190"></a>
-<span class="sourceLineNo">191</span>   * @param filesToUpdate A mapping of archived files to their size<a name="line.191"></a>
-<span class="sourceLineNo">192</span>   * @param snapshotSizeChanges A mapping of snapshots and their change in size<a name="line.192"></a>
-<span class="sourceLineNo">193</span>   */<a name="line.193"></a>
-<span class="sourceLineNo">194</span>  void bucketFilesToSnapshot(<a name="line.194"></a>
-<span class="sourceLineNo">195</span>      String snapshotName, Map&lt;String,Long&gt; filesToUpdate, Map&lt;String,Long&gt; snapshotSizeChanges)<a name="line.195"></a>
-<span class="sourceLineNo">196</span>          throws IOException {<a name="line.196"></a>
-<span class="sourceLineNo">197</span>    // A quick check to avoid doing work if the caller unnecessarily invoked this method.<a name="line.197"></a>
-<span class="sourceLineNo">198</span>    if (filesToUpdate.isEmpty()) {<a name="line.198"></a>
-<span class="sourceLineNo">199</span>      return;<a name="line.199"></a>
-<span class="sourceLineNo">200</span>    }<a name="line.200"></a>
-<span class="sourceLineNo">201</span><a name="line.201"></a>
-<span class="sourceLineNo">202</span>    Path snapshotDir = SnapshotDescriptionUtils.getCompletedSnapshotDir(<a name="line.202"></a>
-<span class="sourceLineNo">203</span>        snapshotName, FSUtils.getRootDir(conf));<a name="line.203"></a>
-<span class="sourceLineNo">204</span>    SnapshotDescription sd = SnapshotDescriptionUtils.readSnapshotInfo(fs, snapshotDir);<a name="line.204"></a>
-<span class="sourceLineNo">205</span>    SnapshotManifest manifest = SnapshotManifest.open(conf, fs, snapshotDir, sd);<a name="line.205"></a>
-<span class="sourceLineNo">206</span>    // For each region referenced by the snapshot<a name="line.206"></a>
-<span class="sourceLineNo">207</span>    for (SnapshotRegionManifest rm : manifest.getRegionManifests()) {<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      // For each column family in this region<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      for (FamilyFiles ff : rm.getFamilyFilesList()) {<a name="line.209"></a>
-<span class="sourceLineNo">210</span>        // And each store file in that family<a name="line.210"></a>
-<span class="sourceLineNo">211</span>        for (StoreFile sf : ff.getStoreFilesList()) {<a name="line.211"></a>
-<span class="sourceLineNo">212</span>          Long valueOrNull = filesToUpdate.remove(sf.getName());<a name="line.212"></a>
-<span class="sourceLineNo">213</span>          if (valueOrNull != null) {<a name="line.213"></a>
-<span class="sourceLineNo">214</span>            // This storefile was recently archived, we should update this snapshot with its size<a name="line.214"></a>
-<span class="sourceLineNo">215</span>            snapshotSizeChanges.merge(snapshotName, valueOrNull, Long::sum);<a name="line.215"></a>
-<span class="sourceLineNo">216</span>          }<a name="line.216"></a>
-<span class="sourceLineNo">217</span>          // Short-circuit, if we have no more files that were archived, we don't need to iterate<a name="line.217"></a>
-<span class="sourceLineNo">218</span>          // over the rest of the snapshot.<a name="line.218"></a>
-<span class="sourceLineNo">219</span>          if (filesToUpdate.isEmpty()) {<a name="line.219"></a>
-<span class="sourceLineNo">220</span>            return;<a name="line.220"></a>
-<span class="sourceLineNo">221</span>          }<a name="line.221"></a>
-<span class="sourceLineNo">222</span>        }<a name="line.222"></a>
-<span class="sourceLineNo">223</span>      }<a name="line.223"></a>
-<span class="sourceLineNo">224</span>    }<a name="line.224"></a>
-<span class="sourceLineNo">225</span>  }<a name="line.225"></a>
-<span class="sourceLineNo">226</span><a name="line.226"></a>
-<span class="sourceLineNo">227</span>  /**<a name="line.227"></a>
-<span class="sourceLineNo">228</span>   * Reads the current size for each snapshot to update, generates a new update based on that value,<a name="line.228"></a>
-<span class="sourceLineNo">229</span>   * and then writes the new update.<a name="line.229"></a>
-<span class="sourceLineNo">230</span>   *<a name="line.230"></a>
-<span class="sourceLineNo">231</span>   * @param snapshotSizeChanges A map of snapshot name to size change<a name="line.231"></a>
-<span class="sourceLineNo">232</span>   */<a name="line.232"></a>
-<span class="sourceLineNo">233</span>  void persistSnapshotSizeChanges(Map&lt;String,Long&gt; snapshotSizeChanges) throws IOException {<a name="line.233"></a>
-<span class="sourceLineNo">234</span>    try (Table quotaTable = conn.getTable(QuotaTableUtil.QUOTA_TABLE_NAME)) {<a name="line.234"></a>
-<span class="sourceLineNo">235</span>      // Create a list (with a more typical ordering implied)<a name="line.235"></a>
-<span class="sourceLineNo">236</span>      final List&lt;Entry&lt;String,Long&gt;&gt; snapshotSizeEntries = new ArrayList&lt;&gt;(<a name="line.236"></a>
-<span class="sourceLineNo">237</span>          snapshotSizeChanges.entrySet());<a name="line.237"></a>
-<span class="sourceLineNo">238</span>      // Create the Gets for each snapshot we need to update<a name="line.238"></a>
-<span class="sourceLineNo">239</span>      final List&lt;Get&gt; snapshotSizeGets = snapshotSizeEntries.stream()<a name="line.239"></a>
-<span class="sourceLineNo">240</span>          .map((e) -&gt; QuotaTableUtil.makeGetForSnapshotSize(tn, e.getKey()))<a name="line.240"></a>
-<span class="sourceLineNo">241</span>          .collect(Collectors.toList());<a name="line.241"></a>
-<span class="sourceLineNo">242</span>      final Iterator&lt;Entry&lt;String,Long&gt;&gt; iterator = snapshotSizeEntries.iterator();<a name="line.242"></a>
-<span class="sourceLineNo">243</span>      // A List to store each Put we'll create from the Get's we retrieve<a name="line.243"></a>
-<span class="sourceLineNo">244</span>      final List&lt;Put&gt; updates = new ArrayList&lt;&gt;(snapshotSizeEntries.size());<a name="line.244"></a>
-<span class="sourceLineNo">245</span><a name="line.245"></a>
-<span class="sourceLineNo">246</span>      // TODO Push this down to the RegionServer with a coprocessor:<a name="line.246"></a>
-<span class="sourceLineNo">247</span>      //<a name="line.247"></a>
-<span class="sourceLineNo">248</span>      // We would really like to piggy-back on the row-lock already being grabbed<a name="line.248"></a>
-<span class="sourceLineNo">249</span>      // to handle the update of the row in the quota table. However, because the value<a name="line.249"></a>
-<span class="sourceLineNo">250</span>      // is a serialized protobuf, the standard Increment API doesn't work for us. With a CP, we<a name="line.250"></a>
-<span class="sourceLineNo">251</span>      // can just send the size deltas to the RS and atomically update the serialized PB object<a name="line.251"></a>
-<span class="sourceLineNo">252</span>      // while relying on the row-lock for synchronization.<a name="line.252"></a>
-<span class="sourceLineNo">253</span>      //<a name="line.253"></a>
-<span class="sourceLineNo">254</span>      // Synchronizing on the namespace string is a "minor smell" but passable as this is<a name="line.254"></a>
-<span class="sourceLineNo">255</span>      // only invoked via a single caller (the active Master). Using the namespace name lets us<a name="line.255"></a>
-<span class="sourceLineNo">256</span>      // have some parallelism without worry of on caller seeing stale data from the quota table.<a name="line.256"></a>
-<span class="sourceLineNo">257</span>      synchronized (getLockForNamespace(tn.getNamespaceAsString())) {<a name="line.257"></a>
-<span class="sourceLineNo">258</span>        final Result[] existingSnapshotSizes = quotaTable.get(snapshotSizeGets);<a name="line.258"></a>
-<span class="sourceLineNo">259</span>        long totalSizeChange = 0;<a name="line.259"></a>
-<span class="sourceLineNo">260</span>        // Read the current size values (if they exist) to generate the new value<a name="line.260"></a>
-<span class="sourceLineNo">261</span>        for (Result result : existingSnapshotSizes) {<a name="line.261"></a>
-<span class="sourceLineNo">262</span>          Entry&lt;String,Long&gt; entry = iterator.next();<a name="line.262"></a>
-<span class="sourceLineNo">263</span>          String snapshot = entry.getKey();<a name="line.263"></a>
-<span class="sourceLineNo">264</span>          Long size = entry.getValue();<a name="line.264"></a>
-<span class="sourceLineNo">265</span>          // Track the total size change for the namespace this table belongs in<a name="line.265"></a>
-<span class="sourceLineNo">266</span>          totalSizeChange += size;<a name="line.266"></a>
-<span class="sourceLineNo">267</span>          // Get the size of the previous value (or zero)<a name="line.267"></a>
-<span class="sourceLineNo">268</span>          long previousSize = getSnapshotSizeFromResult(result);<a name="line.268"></a>
-<span class="sourceLineNo">269</span>          // Create an update. A file was archived from the table, so the table's size goes<a name="line.269"></a>
-<span class="sourceLineNo">270</span>          // down, but the snapshot's size goes up.<a name="line.270"></a>
-<span class="sourceLineNo">271</span>          updates.add(QuotaTableUtil.createPutForSnapshotSize(tn, snapshot, previousSize + size));<a name="line.271"></a>
-<span class="sourceLineNo">272</span>        }<a name="line.272"></a>
-<span class="sourceLineNo">273</span><a name="line.273"></a>
-<span class="sourceLineNo">274</span>        // Create an update for the summation of all snapshots in the namespace<a name="line.274"></a>
-<span class="sourceLineNo">275</span>        if (totalSizeChange != 0) {<a name="line.275"></a>
-<span class="sourceLineNo">276</span>          long previousSize = getPreviousNamespaceSnapshotSize(<a name="line.276"></a>
-<span class="sourceLineNo">277</span>              quotaTable, tn.getNamespaceAsString());<a name="line.277"></a>
-<span class="sourceLineNo">278</span>          updates.add(QuotaTableUtil.createPutForNamespaceSnapshotSize(<a name="line.278"></a>
-<span class="sourceLineNo">279</span>              tn.getNamespaceAsString(), previousSize + totalSizeChange));<a name="line.279"></a>
-<span class="sourceLineNo">280</span>        }<a name="line.280"></a>
-<span class="sourceLineNo">281</span><a name="line.281"></a>
-<span class="sourceLineNo">282</span>        // Send all of the quota table updates in one batch.<a name="line.282"></a>
-<span class="sourceLineNo">283</span>        List&lt;Object&gt; failures = new ArrayList&lt;&gt;();<a name="line.283"></a>
-<span class="sourceLineNo">284</span>        final Object[] results = new Object[updates.size()];<a name="line.284"></a>
-<span class="sourceLineNo">285</span>        quotaTable.batch(updates, results);<a name="line.285"></a>
-<span class="sourceLineNo">286</span>        for (Object result : results) {<a name="line.286"></a>
-<span class="sourceLineNo">287</span>          // A null result is an error condition (all RPC attempts failed)<a name="line.287"></a>
-<span class="sourceLineNo">288</span>          if (!(result instanceof Result)) {<a name="line.288"></a>
-<span class="sourceLineNo">289</span>            failures.add(result);<a name="line.289"></a>
-<span class="sourceLineNo">290</span>          }<a name="line.290"></a>
-<span class="sourceLineNo">291</span>        }<a name="line.291"></a>
-<span class="sourceLineNo">292</span>        // Propagate a failure if any updates failed<a name="line.292"></a>
-<span class="sourceLineNo">293</span>        if (!failures.isEmpty()) {<a name="line.293"></a>
-<span class="sourceLineNo">294</span>          throw new QuotaSnapshotSizeSerializationException(<a name="line.294"></a>
-<span class="sourceLineNo">295</span>              "Failed to write some snapshot size updates: " + failures);<a name="line.295"></a>
-<span class="sourceLineNo">296</span>        }<a name="line.296"></a>
-<span class="sourceLineNo">297</span>      }<a name="line.297"></a>
-<span class="sourceLineNo">298</span>    } catch (InterruptedException e) {<a name="line.298"></a>
-<span class="sourceLineNo">299</span>      Thread.currentThread().interrupt();<a name="line.299"></a>
-<span class="sourceLineNo">300</span>      return;<a name="line.300"></a>
-<span class="sourceLineNo">301</span>    }<a name="line.301"></a>
-<span class="sourceLineNo">302</span>  }<a name="line.302"></a>
-<span class="sourceLineNo">303</span><a name="line.303"></a>
-<span class="sourceLineNo">304</span>  /**<a name="line.304"></a>
-<span class="sourceLineNo">305</span>   * Fetches the current size of all snapshots in the given {@code namespace}.<a name="line.305"></a>
-<span class="sourceLineNo">306</span>   *<a name="line.306"></a>
-<span class="sourceLineNo">307</span>   * @param quotaTable The HBase quota table<a name="line.307"></a>
-<span class="sourceLineNo">308</span>   * @param namespace Namespace to fetch the sum of snapshot sizes for<a name="line.308"></a>
-<span class="sourceLineNo">309</span>   * @return The size of all snapshot sizes for the namespace in bytes.<a name="line.309"></a>
-<span class="sourceLineNo">310</span>   */<a name="line.310"></a>
-<span class="sourceLineNo">311</span>  long getPreviousNamespaceSnapshotSize(Table quotaTable, String namespace) throws IOException {<a name="line.311"></a>
-<span class="sourceLineNo">312</span>    // Update the size of each snapshot for all snapshots in a namespace.<a name="line.312"></a>
-<span class="sourceLineNo">313</span>    Result r = quotaTable.get(<a name="line.313"></a>
-<span class="sourceLineNo">314</span>        QuotaTableUtil.createGetNamespaceSnapshotSize(namespace));<a name="line.314"></a>
-<span class="sourceLineNo">315</span>    return getSnapshotSizeFromResult(r);<a name="line.315"></a>
-<span class="sourceLineNo">316</span>  }<a name="line.316"></a>
-<span class="sourceLineNo">317</span><a name="line.317"></a>
-<span class="sourceLineNo">318</span>  /**<a name="line.318"></a>
-<span class="sourceLineNo">319</span>   * Extracts the size component from a serialized {@link SpaceQuotaSnapshot} protobuf.<a name="line.319"></a>
-<span class="sourceLineNo">320</span>   *<a name="line.320"></a>
-<span class="sourceLineNo">321</span>   * @param r A Result containing one cell with a SpaceQuotaSnapshot protobuf<a name="line.321"></a>
-<span class="sourceLineNo">322</span>   * @return The size in bytes of the snapshot.<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   */<a name="line.323"></a>
-<span class="sourceLineNo">324</span>  long getSnapshotSizeFromResult(Result r) throws InvalidProtocolBufferException {<a name="line.324"></a>
-<span class="sourceLineNo">325</span>    // Per javadoc, Result should only be null if an exception was thrown. So, if we're here,<a name="line.325"></a>
-<span class="sourceLineNo">326</span>    // we should be non-null. If we can't advance to the first cell, same as "no cell".<a name="line.326"></a>
-<span class="sourceLineNo">327</span>    if (!r.isEmpty() &amp;&amp; r.advance()) {<a name="line.327"></a>
-<span class="sourceLineNo">328</span>      return QuotaTableUtil.parseSnapshotSize(r.current());<a name="line.328"></a>
-<span class="sourceLineNo">329</span>    }<a name="line.329"></a>
-<span class="sourceLineNo">330</span>    return 0L;<a name="line.330"></a>
-<span class="sourceLineNo">331</span>  }<a name="line.331"></a>
-<span class="sourceLineNo">332</span><a name="line.332"></a>
-<span class="sourceLineNo">333</span>  @Override<a name="line.333"></a>
-<span class="sourceLineNo">334</span>  public long computeAndStoreSnapshotSizes(<a name="line.334"></a>
-<span class="sourceLineNo">335</span>      Collection&lt;String&gt; currentSnapshots) throws IOException {<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    // Record what the current snapshots are<a name="line.336"></a>
-<span class="sourceLineNo">337</span>    this.currentSnapshots = new ArrayList&lt;&gt;(currentSnapshots);<a name="line.337"></a>
-<span class="sourceLineNo">338</span>    Collections.sort(this.currentSnapshots);<a name="line.338"></a>
-<span class="sourceLineNo">339</span><a name="line.339"></a>
-<span class="sourceLineNo">340</span>    // compute new size for table + snapshots for that table<a name="line.340"></a>
-<span class="sourceLineNo">341</span>    List&lt;SnapshotWithSize&gt; snapshotSizes = computeSnapshotSizes(this.currentSnapshots);<a name="line.341"></a>
-<span class="sourceLineNo">342</span>    if (LOG.isTraceEnabled()) {<a name="line.342"></a>
-<span class="sourceLineNo">343</span>      LOG.trace("Computed snapshot sizes for " + tn + " of " + snapshotSizes);<a name="line.343"></a>
-<span class="sourceLineNo">344</span>    }<a name="line.344"></a>
-<span class="sourceLineNo">345</span><a name="line.345"></a>
-<span class="sourceLineNo">346</span>    // Compute the total size of all snapshots against our table<a name="line.346"></a>
-<span class="sourceLineNo">347</span>    final long totalSnapshotSize = snapshotSizes.stream().mapToLong((sws) -&gt; sws.getSize()).sum();<a name="line.347"></a>
-<span class="sourceLineNo">348</span><a name="line.348"></a>
-<span class="sourceLineNo">349</span>    writeLock.lock();<a name="line.349"></a>
-<span class="sourceLineNo">350</span>    try {<a name="line.350"></a>
-<span class="sourceLineNo">351</span>      // Persist the size of each snapshot<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      try (Table quotaTable = conn.getTable(QuotaTableUtil.QUOTA_TABLE_NAME)) {<a name="line.352"></a>
-<span class="sourceLineNo">353</span>        persistSnapshotSizes(quotaTable, snapshotSizes);<a name="line.353"></a>
-<span class="sourceLineNo">354</span>      }<a name="line.354"></a>
-<span class="sourceLineNo">355</span><a name="line.355"></a>
-<span class="sourceLineNo">356</span>      // Report the last time we did a recomputation<a name="line.356"></a>
-<span class="sourceLineNo">357</span>      lastFullCompute = System.nanoTime();<a name="line.357"></a>
-<span class="sourceLineNo">358</span><a name="line.358"></a>
-<span class="sourceLineNo">359</span>      return totalSnapshotSize;<a name="line.359"></a>
-<span class="sourceLineNo">360</span>    } finally {<a name="line.360"></a>
-<span class="sourceLineNo">361</span>      writeLock.unlock();<a name="line.361"></a>
-<span class="sourceLineNo">362</span>    }<a name="line.362"></a>
-<span class="sourceLineNo">363</span>  }<a name="line.363"></a>
-<span class="sourceLineNo">364</span><a name="line.364"></a>
-<span class="sourceLineNo">365</span>  @Override<a name="line.365"></a>
-<span class="sourceLineNo">366</span>  public String toString() {<a name="line.366"></a>
-<span class="sourceLineNo">367</span>    StringBuilder sb = new StringBuilder();<a name="line.367"></a>
-<span class="sourceLineNo">368</span>    sb.append(getClass().getSimpleName()).append("[");<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    sb.append("tableName=").append(tn).append(", currentSnapshots=");<a name="line.369"></a>
-<span class="sourceLineNo">370</span>    sb.append(currentSnapshots).append(", lastFullCompute=").append(lastFullCompute);<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    return sb.append("]").toString();<a name="line.371"></a>
-<span class="sourceLineNo">372</span>  }<a name="line.372"></a>
-<span class="sourceLineNo">373</span><a name="line.373"></a>
-<span class="sourceLineNo">374</span>  /**<a name="line.374"></a>
-<span class="sourceLineNo">375</span>   * Computes the size of each snapshot against the table referenced by {@code this}.<a name="line.375"></a>
-<span class="sourceLineNo">376</span>   *<a name="line.376"></a>
-<span class="sourceLineNo">377</span>   * @param snapshots A sorted list of snapshots against {@code tn}.<a name="line.377"></a>
-<span class="sourceLineNo">378</span>   * @return A list of the size for each snapshot against {@code tn}.<a name="line.378"></a>
-<span class="sourceLineNo">379</span>   */<a name="line.379"></a>
-<span class="sourceLineNo">380</span>  List&lt;SnapshotWithSize&gt; computeSnapshotSizes(List&lt;String&gt; snapshots) throws IOException {<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    final List&lt;SnapshotWithSize&gt; snapshotSizes = new ArrayList&lt;&gt;(snapshots.size());<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    final Path rootDir = FSUtils.getRootDir(conf);<a name="line.382"></a>
-<span class="sourceLineNo">383</span><a name="line.383"></a>
-<span class="sourceLineNo">384</span>    // Get the map of store file names to store file path for this table<a name="line.384"></a>
-<span class="sourceLineNo">385</span>    final Set&lt;String&gt; tableReferencedStoreFiles;<a name="line.385"></a>
-<span class="sourceLineNo">386</span>    try {<a name="line.386"></a>
-<span class="sourceLineNo">387</span>      tableReferencedStoreFiles = FSUtils.getTableStoreFilePathMap(fs, rootDir).keySet();<a name="line.387"></a>
-<span class="sourceLineNo">388</span>    } catch (InterruptedException e) {<a name="line.388"></a>
-<span class="sourceLineNo">389</span>      Thread.currentThread().interrupt();<a name="line.389"></a>
-<span class="sourceLineNo">390</span>      return null;<a name="line.390"></a>
-<span class="sourceLineNo">391</span>    }<a name="line.391"></a>
-<span class="sourceLineNo">392</span><a name="line.392"></a>
-<span class="sourceLineNo">393</span>    if (LOG.isTraceEnabled()) {<a name="line.393"></a>
-<span class="sourceLineNo">394</span>      LOG.trace("Paths for " + tn + ": " + tableReferencedStoreFiles);<a name="line.394"></a>
-<span class="sourceLineNo">395</span>    }<a name="line.395"></a>
-<span class="sourceLineNo">396</span><a name="line.396"></a>
-<span class="sourceLineNo">397</span>    // For each snapshot on this table, get the files which the snapshot references which<a name="line.397"></a>
-<span class="sourceLineNo">398</span>    // the table does not.<a name="line.398"></a>
-<span class="sourceLineNo">399</span>    Set&lt;String&gt; snapshotReferencedFiles = new HashSet&lt;&gt;();<a name="line.399"></a>
-<span class="sourceLineNo">400</span>    for (String snapshotName : snapshots) {<a name="line.400"></a>
-<span class="sourceLineNo">401</span>      Path snapshotDir = SnapshotDescriptionUtils.getCompletedSnapshotDir(snapshotName, rootDir);<a name="line.401"></a>
-<span class="sourceLineNo">402</span>      SnapshotDescription sd = SnapshotDescriptionUtils.readSnapshotInfo(fs, snapshotDir);<a name="line.402"></a>
-<span class="sourceLineNo">403</span>      SnapshotManifest manifest = SnapshotManifest.open(conf, fs, snapshotDir, sd);<a name="line.403"></a>
-<span class="sourceLineNo">404</span><a name="line.404"></a>
-<span class="sourceLineNo">405</span>      if (LOG.isTraceEnabled()) {<a name="line.405"></a>
-<span class="sourceLineNo">406</span>        LOG.trace("Files referenced by other snapshots: " + snapshotReferencedFiles);<a name="line.406"></a>
-<span class="sourceLineNo">407</span>      }<a name="line.407"></a>
-<span class="sourceLineNo">408</span><a name="line.408"></a>
-<span class="sourceLineNo">409</span>      // Get the set of files from the manifest that this snapshot references which are not also<a name="line.409"></a>
-<span class="sourceLineNo">410</span>      // referenced by the originating table.<a name="line.410"></a>
-<span class="sourceLineNo">411</span>      Set&lt;StoreFileReference&gt; unreferencedStoreFileNames = getStoreFilesFromSnapshot(<a name="line.411"></a>
-<span class="sourceLineNo">412</span>          manifest, (sfn) -&gt; !tableReferencedStoreFiles.contains(sfn)<a name="line.412"></a>
-<span class="sourceLineNo">413</span>              &amp;&amp; !snapshotReferencedFiles.contains(sfn));<a name="line.413"></a>
-<span class="sourceLineNo">414</span><a name="line.414"></a>
-<span class="sourceLineNo">415</span>      if (LOG.isTraceEnabled()) {<a name="line.415"></a>
-<span class="sourceLineNo">416</span>        LOG.trace("Snapshot " + snapshotName + " solely references the files: "<a name="line.416"></a>
-<span class="sourceLineNo">417</span>            + unreferencedStoreFileNames);<a name="line.417"></a>
-<span class="sourceLineNo">418</span>      }<a name="line.418"></a>
-<span class="sourceLineNo">419</span><a name="line.419"></a>
-<span class="sourceLineNo">420</span>      // Compute the size of the store files for this snapshot<a name="line.420"></a>
-<span class="sourceLineNo">421</span>      long size = getSizeOfStoreFiles(tn, unreferencedStoreFileNames);<a name="line.421"></a>
-<span class="sourceLineNo">422</span>      if (LOG.isTraceEnabled()) {<a name="line.422"></a>
-<span class="sourceLineNo">423</span>        LOG.trace("Computed size of " + snapshotName + " to be " + size);<a name="line.423"></a>
-<span class="sourceLineNo">424</span>      }<a name="line.424"></a>
-<span class="sourceLineNo">425</span><a name="line.425"></a>
-<span class="sourceLineNo">426</span>      // Persist this snapshot's size into the map<a name="line.426"></a>
-<span class="sourceLineNo">427</span>      snapshotSizes.add(new SnapshotWithSize(snapshotName, size));<a name="line.427"></a>
-<span class="sourceLineNo">428</span><a name="line.428"></a>
-<span class="sourceLineNo">429</span>      // Make sure that we don't double-count the same file<a name="line.429"></a>
-<span class="sourceLineNo">430</span>      for (StoreFileReference ref : unreferencedStoreFileNames) {<a name="line.430"></a>
-<span class="sourceLineNo">431</span>        for (String fileNames : ref.getFamilyToFilesMapping().values()) {<a name="line.431"></a>
-<span class="sourceLineNo">432</span>          snapshotReferencedFiles.add(fileNames);<a name="line.432"></a>
-<span class="sourceLineNo">433</span>        }<a name="line.433"></a>
-<span class="sourceLineNo">434</span>      }<a name="line.434"></a>
-<span class="sourceLineNo">435</span>    }<a name="line.435"></a>
-<span class="sourceLineNo">436</span><a name="line.436"></a>
-<span class="sourceLineNo">437</span>    return snapshotSizes;<a name="line.437"></a>
-<span class="sourceLineNo">438</span>  }<a name="line.438"></a>
-<span class="sourceLineNo">439</span><a name="line.439"></a>
-<span class="sourceLineNo">440</span>  /**<a name="line.440"></a>
-<span class="sourceLineNo">441</span>   * Computes the size of each store file in {@code storeFileNames}<a name="line.441"></a>
-<span class="sourceLineNo">442</span>   */<a name="line.442"></a>
-<span class="sourceLineNo">443</span>  long getSizeOfStoreFiles(TableName tn, Set&lt;StoreFileReference&gt; storeFileNames) {<a name="line.443"></a>
-<span class="sourceLineNo">444</span>    return storeFileNames.stream()<a name="line.444"></a>
-<span class="sourceLineNo">445</span>        .collect(Collectors.summingLong((sfr) -&gt; getSizeOfStoreFile(tn, sfr)));<a name="line.445"></a>
-<span class="sourceLineNo">446</span>  }<a name="line.446"></a>
-<span class="sourceLineNo">447</span><a name="line.447"></a>
-<span class="sourceLineNo">448</span>  /**<a name="line.448"></a>
-<span class="sourceLineNo">449</span>   * Computes the size of the store files for a single region.<a name="line.449"></a>
-<span class="sourceLineNo">450</span>   */<a name="line.450"></a>
-<span class="sourceLineNo">451</span>  long getSizeOfStoreFile(TableName tn, StoreFileReference storeFileName) {<a name="line.451"></a>
-<span class="sourceLineNo">452</span>    String regionName = storeFileName.getRegionName();<a name="line.452"></a>
-<span class="sourceLineNo">453</span>    return storeFileName.getFamilyToFilesMapping()<a name="line.453"></a>
-<span class="sourceLineNo">454</span>        .entries().stream()<a name="line.454"></a>
-<span class="sourceLineNo">455</span>        .collect(Collectors.summingLong((e) -&gt;<a name="line.455"></a>
-<span class="sourceLineNo">456</span>            getSizeOfStoreFile(tn, regionName, e.getKey(), e.getValue())));<a name="line.456"></a>
-<span class="sourceLineNo">457</span>  }<a name="line.457"></a>
-<span class="sourceLineNo">458</span><a name="line.458"></a>
-<span class="sourceLineNo">459</span>  /**<a name="line.459"></a>
-<span class="sourceLineNo">460</span>   * Computes the size of the store file given its name, region and family name in<a name="line.460"></a>
-<span class="sourceLineNo">461</span>   * the archive directory.<a name="line.461"></a>
-<span class="sourceLineNo">462</span>   */<a name="line.462"></a>
-<span class="sourceLineNo">463</span>  long getSizeOfStoreFile(<a name="line.463"></a>
-<span class="sourceLineNo">464</span>      TableName tn, String regionName, String family, String storeFile) {<a name="line.464"></a>
-<span class="sourceLineNo">465</span>    Path familyArchivePath;<a name="line.465"></a>
-<span class="sourceLineNo">466</span>    try {<a name="line.466"></a>
-<span class="sourceLineNo">467</span>      familyArchivePath = HFileArchiveUtil.getStoreArchivePath(conf, tn, regionName, family);<a name="line.467"></a>
-<span class="sourceLineNo">468</span>    } catch (IOException e) {<a name="line.468"></a>
-<span class="sourceLineNo">469</span>      LOG.warn("Could not compute path for the archive directory for the region", e);<a name="line.469"></a>
-<span class="sourceLineNo">470</span>      return 0L;<a name="line.470"></a>
-<span class="sourceLineNo">471</span>    }<a name="line.471"></a>
-<span class="sourceLineNo">472</span>    Path fileArchivePath = new Path(familyArchivePath, storeFile);<a name="line.472"></a>
-<span class="sourceLineNo">473</span>    try {<a name="line.473"></a>
-<span class="sourceLineNo">474</span>      if (fs.exists(fileArchivePath)) {<a name="line.474"></a>
-<span class="sourceLineNo">475</span>        FileStatus[] status = fs.listStatus(fileArchivePath);<a name="line.475"></a>
-<span class="sourceLineNo">476</span>        if (1 != status.length) {<a name="line.476"></a>
-<span class="sourceLineNo">477</span>          LOG.warn("Expected " + fileArchivePath +<a name="line.477"></a>
-<span class="sourceLineNo">478</span>              " to be a file but was a directory, ignoring reference");<a name="line.478"></a>
-<span class="sourceLineNo">479</span>          return 0L;<a name="line.479"></a>
-<span class="sourceLineNo">480</span>        }<a name="line.480"></a>
-<span class="sourceLineNo">481</span>        return status[0].getLen();<a name="line.481"></a>
-<span class="sourceLineNo">482</span>      }<a name="line.482"></a>
-<span class="sourceLineNo">483</span>    } catch (IOException e) {<a name="line.483"></a>
-<span class="sourceLineNo">484</span>      LOG.warn("Could not obtain the status of " + fileArchivePath, e);<a name="line.484"></a>
-<span class="sourceLineNo">485</span>      return 0L;<a name="line.485"></a>
-<span class="sourceLineNo">486</span>    }<a name="line.486"></a>
-<span class="sourceLineNo">487</span>    LOG.warn("Expected " + fileArchivePath + " to exist but does not, ignoring reference.");<a name="line.487"></a>
-<span class="sourceLineNo">488</span>    return 0L;<a name="line.488"></a>
-<span class="sourceLineNo">489</span>  }<a name="line.489"></a>
-<span class="sourceLineNo">490</span><a name="line.490"></a>
-<span class="sourceLineNo">491</span>  /**<a name="line.491"></a>
-<span class="sourceLineNo">492</span>   * Extracts the names of the store files referenced by this snapshot which satisfy the given<a name="line.492"></a>
-<span class="sourceLineNo">493</span>   * predicate (the predicate returns {@code true}).<a name="line.493"></a>
-<span class="sourceLineNo">494</span>   */<a name="line.494"></a>
-<span class="sourceLineNo">495</span>  Set&lt;StoreFileReference&gt; getStoreFilesFromSnapshot(<a name="line.495"></a>
-<span class="sourceLineNo">496</span>      SnapshotManifest manifest, Predicate&lt;String&gt; filter) {<a name="line.496"></a>
-<span class="sourceLineNo">497</span>    Set&lt;StoreFileReference&gt; references = new HashSet&lt;&gt;();<a name="line.497"></a>
-<span class="sourceLineNo">498</span>    // For each region referenced by the snapshot<a name="line.498"></a>
-<span class="sourceLineNo">499</span>    for (SnapshotRegionManifest rm : manifest.getRegionManifests()) {<a name="line.499"></a>
-<span class="sourceLineNo">500</span>      StoreFileReference regionReference = new StoreFileReference(<a name="line.500"></a>
-<span class="sourceLineNo">501</span>          ProtobufUtil.toRegionInfo(rm.getRegionInfo()).getEncodedName());<a name="line.501"></a>
-<span class="sourceLineNo">502</span><a name="line.502"></a>
-<span class="sourceLineNo">503</span>      // For each column family in this region<a name="line.503"></a>
-<span class="sourceLineNo">504</span>      for (FamilyFiles ff : rm.getFamilyFilesList()) {<a name="line.504"></a>
-<span class="sourceLineNo">505</span>        final String familyName = ff.getFamilyName().toStringUtf8();<a name="line.505"></a>
-<span class="sourceLineNo">506</span>        // And each store file in that family<a name="line.506"></a>
-<span class="sourceLineNo">507</span>        for (StoreFile sf : ff.getStoreFilesList()) {<a name="line.507"></a>
-<span class="sourceLineNo">508</span>          String storeFileName = sf.getName();<a name="line.508"></a>
-<span class="sourceLineNo">509</span>          // A snapshot only "inherits" a files size if it uniquely refers to it (no table<a name="line.509"></a>
-<span class="sourceLineNo">510</span>          // and no other snapshot references it).<a name="line.510"></a>
-<span class="sourceLineNo">511</span>          if (filter.test(storeFileName)) {<a name="line.511"></a>
-<span class="sourceLineNo">512</span>            regionReference.addFamilyStoreFile(familyName, storeFileName);<a name="line.512"></a>
-<span class="sourceLineNo">513</span>          }<a name="line.513"></a>
-<span class="sourceLineNo">514</span>        }<a name="line.514"></a>
-<span class="sourceLineNo">515</span>      }<a name="line.515"></a>
-<span class="sourceLineNo">516</span>      // Only add this Region reference if we retained any files.<a name="line.516"></a>
-<span class="sourceLineNo">517</span>      if (!regionReference.getFamilyToFilesMapping().isEmpty()) {<a name="line.517"></a>
-<span class="sourceLineNo">518</span>        references.add(regionReference);<a name="line.518"></a>
-<span class="sourceLineNo">519</span>      }<a name="line.519"></a>
-<span class="sourceLineNo">520</span>    }<a name="line.520"></a>
-<span class="sourceLineNo">521</span>    return references;<a name="line.521"></a>
-<span class="sourceLineNo">522</span>  }<a name="line.522"></a>
-<span class="sourceLineNo">523</span><a name="line.523"></a>
-<span class="sourceLineNo">524</span>  /**<a name="line.524"></a>
-<span class="sourceLineNo">525</span>   * Writes the snapshot sizes to the provided {@code table}.<a name="line.525"></a>
-<span class="sourceLineNo">526</span>   */<a name="line.526"></a>
-<span class="sourceLineNo">527</span>  void persistSnapshotSizes(<a name="line.527"></a>
-<span class="sourceLineNo">528</span>      Table table, List&lt;SnapshotWithSize&gt; snapshotSizes) throws IOException {<a name="line.528"></a>
-<span class="sourceLineNo">529</span>    // Convert each entry in the map to a Put and write them to the quota table<a name="line.529"></a>
-<span class="sourceLineNo">530</span>    table.put(snapshotSizes<a name="line.530"></a>
-<span class="sourceLineNo">531</span>        .stream()<a name="line.531"></a>
-<span class="sourceLineNo">532</span>        .map(sws -&gt; QuotaTableUtil.createPutForSnapshotSize(<a name="line.532"></a>
-<span class="sourceLineNo">533</span>            tn, sws.getName(), sws.getSize()))<a name="line.533"></a>
-<span class="sourceLineNo">534</span>        .collect(Collectors.toList()));<a name="line.534"></a>
-<span class="sourceLineNo">535</span>  }<a name="line.535"></a>
-<span class="sourceLineNo">536</span><a name="line.536"></a>
-<span class="sourceLineNo">537</span>  /**<a name="line.537"></a>
-<span class="sourceLineNo">538</span>   * A struct encapsulating the name of a snapshot and its "size" on the filesystem. This size is<a name="line.538"></a>
-<span class="sourceLineNo">539</span>   * defined as the amount of filesystem space taken by the files the snapshot refers to which<a name="line.539"></a>
-<span class="sourceLineNo">540</span>   * the originating table no longer refers to.<a name="line.540"></a>
-<span class="sourceLineNo">541</span>   */<a name="line.541"></a>
-<span class="sourceLineNo">542</span>  static class SnapshotWithSize {<a name="line.542"></a>
-<span class="sourceLineNo">543</span>    private final String name;<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    private final long size;<a name="line.544"></a>
-<span class="sourceLineNo">545</span><a name="line.545"></a>
-<span class="sourceLineNo">546</span>    SnapshotWithSize(String name, long size) {<a name="line.546"></a>
-<span class="sourceLineNo">547</span>      this.name = Objects.requireNonNull(name);<a name="line.547"></a>
-<span class="sourceLineNo">548</span>      this.size = size;<a name="line.548"></a>
-<span class="sourceLineNo">549</span>    }<a name="line.549"></a>
-<span class="sourceLineNo">550</span><a name="line.550"></a>
-<span class="sourceLineNo">551</span>    String getName() {<a name="line.551"></a>
-<span class="sourceLineNo">552</span>      return name;<a name="line.552"></a>
-<span class="sourceLineNo">553</span>    }<a name="line.553"></a>
-<span class="sourceLineNo">554</span><a name="line.554"></a>
-<span class="sourceLineNo">555</span>    long getSize() {<a name="line.555"></a>
-<span class="sourceLineNo">556</span>      return size;<a name="line.556"></a>
-<span class="sourceLineNo">557</span>    }<a name="line.557"></a>
-<span class="sourceLineNo">558</span><a name="line.558"></a>
-<span class="sourceLineNo">559</span>    @Override<a name="line.559"></a>
-<span class="sourceLineNo">560</span>    public int hashCode() {<a name="line.560"></a>
-<span class="sourceLineNo">561</span>      return new HashCodeBuilder().append(name).append(size).toHashCode();<a name="line.561"></a>
-<span class="sourceLineNo">562</span>    }<a name="line.562"></a>
-<span class="sourceLineNo">563</span><a name="line.563"></a>
-<span class="sourceLineNo">564</span>    @Override<a name="line.564"></a>
-<span class="sourceLineNo">565</span>    public boolean equals(Object o) {<a name="line.565"></a>
-<span class="sourceLineNo">566</span>      if (this == o) {<a name="line.566"></a>
-<span class="sourceLineNo">567</span>        return true;<a name="line.567"></a>
-<span class="sourceLineNo">568</span>      }<a name="line.568"></a>
-<span class="sourceLineNo">569</span><a name="line.569"></a>
-<span class="sourceLineNo">570</span>      if (!(o instanceof SnapshotWithSize)) {<a name="line.570"></a>
-<span class="sourceLineNo">571</span>        return false;<a name="line.571"></a>
-<span class="sourceLineNo">572</span>      }<a name="line.572"></a>
-<span class="sourceLineNo">573</span><a name="line.573"></a>
-<span class="sourceLineNo">574</span>      SnapshotWithSize other = (SnapshotWithSize) o;<a name="line.574"></a>
-<span class="sourceLineNo">575</span>      return name.equals(other.name) &amp;&amp; size == other.size;<a name="line.575"></a>
-<span class="sourceLineNo">576</span>    }<a name="line.576"></a>
-<span class="sourceLineNo">577</span><a name="line.577"></a>
-<span class="sourceLineNo">578</span>    @Override<a name="line.578"></a>
-<span class="sourceLineNo">579</span>    public String toString() {<a name="line.579"></a>
-<span class="sourceLineNo">580</span>      StringBuilder sb = new StringBuilder(32);<a name="line.580"></a>
-<span class="sourceLineNo">581</span>      return sb.append("SnapshotWithSize:[").append(name).append(" ")<a name="line.581"></a>
-<span class="sourceLineNo">582</span>          .append(StringUtils.byteDesc(size)).append("]").toString();<a name="line.582"></a>
-<span class="sourceLineNo">583</span>    }<a name="line.583"></a>
-<span class="sourceLineNo">584</span>  }<a name="line.584"></a>
-<span class="sourceLineNo">585</span><a name="line.585"></a>
-<span class="sourceLineNo">586</span>  /**<a name="line.586"></a>
-<span class="sourceLineNo">587</span>   * A reference to a collection of files in the archive directory for a single region.<a name="line.587"></a>
-<span class="sourceLineNo">588</span>   */<a name="line.588"></a>
-<span class="sourceLineNo">589</span>  static class StoreFileReference {<a name="line.589"></a>
-<span class="sourceLineNo">590</span>    private final String regionName;<a name="line.590"></a>
-<span class="sourceLineNo">591</span>    private final Multimap&lt;String,String&gt; familyToFiles;<a name="line.591"></a>
-<span class="sourceLineNo">592</span><a name="line.592"></a>
-<span class="sourceLineNo">593</span>    StoreFileReference(String regionName) {<a name="line.593"></a>
-<span class="sourceLineNo">594</span>      this.regionName = Objects.requireNonNull(regionName);<a name="line.594"></a>
-<span class="sourceLineNo">595</span>      familyToFiles = HashMultimap.create();<a name="line.595"></a>
-<span class="sourceLineNo">596</span>    }<a name="line.596"></a>
-<span class="sourceLineNo">597</span><a name="line.597"></a>
-<span class="sourceLineNo">598</span>    String getRegionName() {<a name="line.598"></a>
-<span class="sourceLineNo">599</span>      return regionName;<a name="line.599"></a>
-<span class="sourceLineNo">600</span>    }<a name="line.600"></a>
-<span class="sourceLineNo">601</span><a name="line.601"></a>
-<span class="sourceLineNo">602</span>    Multimap&lt;String,String&gt; getFamilyToFilesMapping() {<a name="line.602"></a>
-<span class="sourceLineNo">603</span>      return familyToFiles;<a name="line.603"></a>
-<span class="sourceLineNo">604</span>    }<a name="line.604"></a>
-<span class="sourceLineNo">605</span><a name="line.605"></a>
-<span class="sourceLineNo">606</span>    void addFamilyStoreFile(String family, String storeFileName) {<a name="line.606"></a>
-<span class="sourceLineNo">607</span>      familyToFiles.put(family, storeFileName);<a name="line.607"></a>
-<span class="sourceLineNo">608</span>    }<a name="line.608"></a>
-<span class="sourceLineNo">609</span><a name="line.609"></a>
-<span class="sourceLineNo">610</span>    @Override<a name="line.610"></a>
-<span class="sourceLineNo">611</span>    public int hashCode() {<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      return new HashCodeBuilder().append(regionName).append(familyToFiles).toHashCode();<a name="line.612"></a>
-<span class="sourceLineNo">613</span>    }<a name="line.613"></a>
-<span class="sourceLineNo">614</span><a name="line.614"></a>
-<span class="sourceLineNo">615</span>    @Override<a name="line.615"></a>
-<span class="sourceLineNo">616</span>    public boolean equals(Object o) {<a name="line.616"></a>
-<span class="sourceLineNo">617</span>      if (this == o) {<a name="line.617"></a>
-<span class="sourceLineNo">618</span>        return true;<a name="line.618"></a>
-<span class="sourceLineNo">619</span>      }<a name="line.619"></a>
-<span class="sourceLineNo">620</span>      if (!(o instanceof StoreFileReference)) {<a name="line.620"></a>
-<span class="sourceLineNo">621</span>        return false;<a name="line.621"></a>
-<span class="sourceLineNo">622</span>      }<a name="line.622"></a>
-<span class="sourceLineNo">623</span>      StoreFileReference other = (StoreFileReference) o;<a name="line.623"></a>
-<span class="sourceLineNo">624</span>      return regionName.equals(other.regionName) &amp;&amp; familyToFiles.equals(other.familyToFiles);<a name="line.624"></a>
-<span class="sourceLineNo">625</span>    }<a name="line.625"></a>
-<span class="sourceLineNo">626</span><a name="line.626"></a>
-<span class="sourceLineNo">627</span>    @Override<a name="line.627"></a>
-<span class="sourceLineNo">628</span>    public String toString() {<a name="line.628"></a>
-<span class="sourceLineNo">629</span>      StringBuilder sb = new StringBuilder();<a name="line.629"></a>
-<span class="sourceLineNo">630</span>      return sb.append("StoreFileReference[region=").append(regionName).append(", files=")<a name="line.630"></a>
-<span class="sourceLineNo">631</span>          .append(familyToFiles).append("]").toString();<a name="line.631"></a>
-<span class="sourceLineNo">632</span>    }<a name="line.632"></a>
-<span class="sourceLineNo">633</span>  }<a name="line.633"></a>
-<span class="sourceLineNo">634</span>}<a name="line.634"></a>
+<span class="sourceLineNo">037</span>import org.apache.commons.lang3.builder.HashCodeBuilder;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.conf.Configuration;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.hadoop.fs.FileStatus;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.apache.hadoop.fs.FileSystem;<a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.fs.Path;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.TableName;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.client.Connection;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.client.Get;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.client.Put;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.client.Result;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.client.Table;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.util.HFileArchiveUtil;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.util.StringUtils;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.53"></a>
+<span class="sourceLineNo">054</span><a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.slf4j.Logger;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.slf4j.LoggerFactory;<a name="line.56"></a>
+<span class="sourceLineNo">057</span><a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hbase.thirdparty.com.google.common.collect.HashMultimap;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;<a name="line.60"></a>
+<span class="sourceLineNo">061</span><a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile;<a name="line.66"></a>
+<span class="sourceLineNo">067</span><a name="line.67"></a>
+<span class="sourceLineNo">068</span>/**<a name="line.68"></a>
+<span class="sourceLineNo">069</span> * Tracks file archiving and updates the hbase quota table.<a name="line.69"></a>
+<span class="sourceLineNo">070</span> */<a name="line.70"></a>
+<span class="sourceLineNo">071</span>@InterfaceAudience.Private<a name="line.71"></a>
+<span class="sourceLineNo">072</span>public class FileArchiverNotifierImpl implements FileArchiverNotifier {<a name="line.72"></a>
+<span class="sourceLineNo">073</span>  private static final Logger LOG = LoggerFactory.getLogger(FileArchiverNotifierImpl.class);<a name="line.73"></a>
+<span class="sourceLineNo">074</span>  private final Connection conn;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>  private final Configuration conf;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>  private final FileSystem fs;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>  private final TableName tn;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>  private final ReadLock readLock;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>  private final WriteLock writeLock;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>  private volatile long lastFullCompute = Long.MIN_VALUE;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>  private List&lt;String&gt; currentSnapshots = Collections.emptyList();<a name="line.81"></a>
+<span class="sourceLineNo">082</span>  private static final Map&lt;String,Object&gt; NAMESPACE_LOCKS = new HashMap&lt;&gt;();<a name="line.82"></a>
+<span class="sourceLineNo">083</span><a name="line.83"></a>
+<span class="sourceLineNo">084</span>  /**<a name="line.84"></a>
+<span class="sourceLineNo">085</span>   * An Exception thrown when SnapshotSize updates to hbase:quota fail to be written.<a name="line.85"></a>
+<span class="sourceLineNo">086</span>   */<a name="line.86"></a>
+<span class="sourceLineNo">087</span>  @InterfaceAudience.Private<a name="line.87"></a>
+<span class="sourceLineNo">088</span>  public static class QuotaSnapshotSizeSerializationException extends IOException {<a name="line.88"></a>
+<span class="sourceLineNo">089</span>    private static final long serialVersionUID = 1L;<a name="line.89"></a>
+<span class="sourceLineNo">090</span><a name="line.90"></a>
+<span class="sourceLineNo">091</span>    public QuotaSnapshotSizeSerializationException(String msg) {<a name="line.91"></a>
+<span class="sourceLineNo">092</span>      super(msg);<a name="line.92"></a>
+<span class="sourceLineNo">093</span>    }<a name="line.93"></a>
+<span class="sourceLineNo">094</span>  }<a name="line.94"></a>
+<span class="sourceLineNo">095</span><a name="line.95"></a>
+<span class="sourceLineNo">096</span>  public FileArchiverNotifierImpl(<a name="line.96"></a>
+<span class="sourceLineNo">097</span>      Connection conn, Configuration conf, FileSystem fs, TableName tn) {<a name="line.97"></a>
+<span class="sourceLineNo">098</span>    this.conn = conn;<a name="line.98"></a>
+<span class="sourceLineNo">099</span>    this.conf = conf;<a name="line.99"></a>
+<span class="sourceLineNo">100</span>    this.fs = fs;<a name="line.100"></a>
+<span class="sourceLineNo">101</span>    this.tn = tn;<a name="line.101"></a>
+<span class="sourceLineNo">102</span>    ReentrantReadWriteLock lock = new ReentrantReadWriteLock();<a name="line.102"></a>
+<span class="sourceLineNo">103</span>    readLock = lock.readLock();<a name="line.103"></a>
+<span class="sourceLineNo">104</span>    writeLock = lock.writeLock();<a name="line.104"></a>
+<span class="sourceLineNo">105</span>  }<a name="line.105"></a>
+<span class="sourceLineNo">106</span><a name="line.106"></a>
+<span class="sourceLineNo">107</span>  static synchronized Object getLockForNamespace(String namespace) {<a name="line.107"></a>
+<span class="sourceLineNo">108</span>    return NAMESPACE_LOCKS.computeIfAbsent(namespace, (ns) -&gt; new Object());<a name="line.108"></a>
+<span class="sourceLineNo">109</span>  }<a name="line.109"></a>
+<span class="sourceLineNo">110</span><a name="line.110"></a>
+<span class="sourceLineNo">111</span>  /**<a name="line.111"></a>
+<span class="sourceLineNo">112</span>   * Returns a strictly-increasing measure of time extracted by {@link System#nanoTime()}.<a name="line.112"></a>
+<span class="sourceLineNo">113</span>   */<a name="line.113"></a>
+<span class="sourceLineNo">114</span>  long getLastFullCompute() {<a name="line.114"></a>
+<span class="sourceLineNo">115</span>    return lastFullCompute;<a name="line.115"></a>
+<span class="sourceLineNo">116</span>  }<a name="line.116"></a>
+<span class="sourceLineNo">117</span><a name="line.117"></a>
+<span class="sourceLineNo">118</span>  @Override<a name="line.118"></a>
+<span class="sourceLineNo">119</span>  public void addArchivedFiles(Set&lt;Entry&lt;String, Long&gt;&gt; fileSizes) throws IOException {<a name="line.119"></a>
+<span class="sourceLineNo">120</span>    long start = System.nanoTime();<a name="line.120"></a>
+<span class="sourceLineNo">121</span>    readLock.lock();<a name="line.121"></a>
+<span class="sourceLineNo">122</span>    try {<a name="line.122"></a>
+<span class="sourceLineNo">123</span>      // We want to catch the case where we got an archival request, but there was a full<a name="line.123"></a>
+<span class="sourceLineNo">124</span>      // re-computation in progress that was blocking us. Most likely, the full computation is going<a name="line.124"></a>
+<span class="sourceLineNo">125</span>      // to already include the changes we were going to make.<a name="line.125"></a>
+<span class="sourceLineNo">126</span>      //<a name="line.126"></a>
+<span class="sourceLineNo">127</span>      // Same as "start &lt; lastFullCompute" but avoiding numeric overflow per the<a name="line.127"></a>
+<span class="sourceLineNo">128</span>      // System.nanoTime() javadoc<a name="line.128"></a>
+<span class="sourceLineNo">129</span>      if (lastFullCompute != Long.MIN_VALUE &amp;&amp; start - lastFullCompute &lt; 0) {<a name="line.129"></a>
+<span class="sourceLineNo">130</span>        if (LOG.isTraceEnabled()) {<a name="line.130"></a>
+<span class="sourceLineNo">131</span>          LOG.trace("A full computation was performed after this request was received."<a name="line.131"></a>
+<span class="sourceLineNo">132</span>              + " Ignoring requested updates: " + fileSizes);<a name="line.132"></a>
+<span class="sourceLineNo">133</span>        }<a name="line.133"></a>
+<span class="sourceLineNo">134</span>        return;<a name="line.134"></a>
+<span class="sourceLineNo">135</span>      }<a name="line.135"></a>
+<span class="sourceLineNo">136</span><a name="line.136"></a>
+<span class="sourceLineNo">137</span>      if (LOG.isTraceEnabled()) {<a name="line.137"></a>
+<span class="sourceLineNo">138</span>        LOG.trace("currentSnapshots: " + currentSnapshots + " fileSize: "+ fileSizes);<a name="line.138"></a>
+<span class="sourceLineNo">139</span>      }<a name="line.139"></a>
+<span class="sourceLineNo">140</span><a name="line.140"></a>
+<span class="sourceLineNo">141</span>      // Write increment to quota table for the correct snapshot. Only do this if we have snapshots<a name="line.141"></a>
+<span class="sourceLineNo">142</span>      // and some files that were archived.<a name="line.142"></a>
+<span class="sourceLineNo">143</span>      if (!currentSnapshots.isEmpty() &amp;&amp; !fileSizes.isEmpty()) {<a name="line.143"></a>
+<span class="sourceLineNo">144</span>        // We get back the files which no snapshot referenced (the files which will be deleted soon)<a name="line.144"></a>
+<span class="sourceLineNo">145</span>        groupArchivedFiledBySnapshotAndRecordSize(currentSnapshots, fileSizes);<a name="line.145"></a>
+<span class="sourceLineNo">146</span>      }<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    } finally {<a name="line.147"></a>
+<span class="sourceLineNo">148</span>      readLock.unlock();<a name="line.148"></a>
+<span class="sourceLineNo">149</span>    }<a name="line.149"></a>
+<span class="sourceLineNo">150</span>  }<a name="line.150"></a>
+<span class="sourceLineNo">151</span><a name="line.151"></a>
+<span class="sourceLineNo">152</span>  /**<a name="line.152"></a>
+<span class="sourceLineNo">153</span>   * For each file in the map, this updates the first snapshot (lexicographic snapshot name) that<a name="line.153"></a>
+<span class="sourceLineNo">154</span>   * references this file. The result of this computation is serialized to the quota table.<a name="line.154"></a>
+<span class="sourceLineNo">155</span>   *<a name="line.155"></a>
+<span class="sourceLineNo">156</span>   * @param snapshots A collection of HBase snapshots to group the files into<a name="line.156"></a>
+<span class="sourceLineNo">157</span>   * @param fileSizes A map of file names to their sizes<a name="line.157"></a>
+<span class="sourceLineNo">158</span>   */<a name="line.158"></a>
+<span class="sourceLineNo">159</span>  void groupArchivedFiledBySnapshotAndRecordSize(<a name="line.159"></a>
+<span class="sourceLineNo">160</span>      List&lt;String&gt; snapshots, Set&lt;Entry&lt;String, Long&gt;&gt; fileSizes) throws IOException {<a name="line.160"></a>
+<span class="sourceLineNo">161</span>    // Make a copy as we'll modify it.<a name="line.161"></a>
+<span class="sourceLineNo">162</span>    final Map&lt;String,Long&gt; filesToUpdate = new HashMap&lt;&gt;(fileSizes.size());<a name="line.162"></a>
+<span class="sourceLineNo">163</span>    for (Entry&lt;String,Long&gt; entry : fileSizes) {<a name="line.163"></a>
+<span class="sourceLineNo">164</span>      filesToUpdate.put(entry.getKey(), entry.getValue());<a name="line.164"></a>
+<span class="sourceLineNo">165</span>    }<a name="line.165"></a>
+<span class="sourceLineNo">166</span>    // Track the change in size to each snapshot<a name="line.166"></a>
+<span class="sourceLineNo">167</span>    final Map&lt;String,Long&gt; snapshotSizeChanges = new HashMap&lt;&gt;();<a name="line.167"></a>
+<span class="sourceLineNo">168</span>    for (String snapshot : snapshots) {<a name="line.168"></a>
+<span class="sourceLineNo">169</span>      // For each file in `filesToUpdate`, check if `snapshot` refers to it.<a name="line.169"></a>
+<span class="sourceLineNo">170</span>      // If `snapshot` does, remove it from `filesToUpdate` and add it to `snapshotSizeChanges`.<a name="line.170"></a>
+<span class="sourceLineNo">171</span>      bucketFilesToSnapshot(snapshot, filesToUpdate, snapshotSizeChanges);<a name="line.171"></a>
+<span class="sourceLineNo">172</span>      if (filesToUpdate.isEmpty()) {<a name="line.172"></a>
+<span class="sourceLineNo">173</span>        // If we have no more files recently archived, we have nothing more to check<a name="line.173"></a>
+<span class="sourceLineNo">174</span>        break;<a name="line.174"></a>
+<span class="sourceLineNo">175</span>      }<a name="line.175"></a>
+<span class="sourceLineNo">176</span>    }<a name="line.176"></a>
+<span class="sourceLineNo">177</span>    // We have computed changes to the snapshot size, we need to record them.<a name="line.177"></a>
+<span class="sourceLineNo">178</span>    if (!snapshotSizeChanges.isEmpty()) {<a name="line.178"></a>
+<span class="sourceLineNo">179</span>      if (LOG.isTraceEnabled()) {<a name="line.179"></a>
+<span class="sourceLineNo">180</span>        LOG.trace("Writing snapshot size changes for: " + snapshotSizeChanges);<a name="line.180"></a>
+<span class="sourceLineNo">181</span>      }<a name="line.181"></a>
+<span class="sourceLineNo">182</span>      persistSnapshotSizeChanges(snapshotSizeChanges);<a name="line.182"></a>
+<span class="sourceLineNo">183</span>    }<a name="line.183"></a>
+<span class="sourceLineNo">184</span>  }<a name="line.184"></a>
+<span class="sourceLineNo">185</span><a name="line.185"></a>
+<span class="sourceLineNo">186</span>  /**<a name="line.186"></a>
+<span class="sourceLineNo">187</span>   * For the given snapshot, find all files which this {@code snapshotName} references. After a file<a name="line.187"></a>
+<span class="sourceLineNo">188</span>   * is found to be referenced by the snapshot, it is removed from {@code filesToUpdate} and<a name="line.188"></a>
+<span class="sourceLineNo">189</span>   * {@code snapshotSizeChanges} is updated in concert.<a name="line.189"></a>
+<span class="sourceLineNo">190</span>   *<a name="line.190"></a>
+<span class="sourceLineNo">191</span>   * @param snapshotName The snapshot to check<a name="line.191"></a>
+<span class="sourceLineNo">192</span>   * @param filesToUpdate A mapping of archived files to their size<a name="line.192"></a>
+<span class="sourceLineNo">193</span>   * @param snapshotSizeChanges A mapping of snapshots and their change in size<a name="line.193"></a>
+<span class="sourceLineNo">194</span>   */<a name="line.194"></a>
+<span class="sourceLineNo">195</span>  void bucketFilesToSnapshot(<a name="line.195"></a>
+<span class="sourceLineNo">196</span>      String snapshotName, Map&lt;String,Long&gt; filesToUpdate, Map&lt;String,Long&gt; snapshotSizeChanges)<a name="line.196"></a>
+<span class="sourceLineNo">197</span>          throws IOException {<a name="line.197"></a>
+<span class="sourceLineNo">198</span>    // A quick check to avoid doing work if the caller unnecessarily invoked this method.<a name="line.198"></a>
+<span class="sourceLineNo">199</span>    if (filesToUpdate.isEmpty()) {<a name="line.199"></a>
+<span class="sourceLineNo">200</span>      return;<a name="line.200"></a>
+<span class="sourceLineNo">201</span>    }<a name="line.201"></a>
+<span class="sourceLineNo">202</span><a name="line.202"></a>
+<span class="sourceLineNo">203</span>    Path snapshotDir = SnapshotDescriptionUtils.getCompletedSnapshotDir(<a name="line.203"></a>
+<span class="sourceLineNo">204</span>        snapshotName, FSUtils.getRootDir(conf));<a name="line.204"></a>
+<span class="sourceLineNo">205</span>    SnapshotDescription sd = SnapshotDescriptionUtils.readSnapshotInfo(fs, snapshotDir);<a name="line.205"></a>
+<span class="sourceLineNo">206</span>    SnapshotManifest manifest = SnapshotManifest.open(conf, fs, snapshotDir, sd);<a name="line.206"></a>
+<span class="sourceLineNo">207</span>    // For each region referenced by the snapshot<a name="line.207"></a>
+<span class="sourceLineNo">208</span>    for (SnapshotRegionManifest rm : manifest.getRegionManifests()) {<a name="line.208"></a>
+<span class="sourceLineNo">209</span>      // For each column family in this region<a name="line.209"></a>
+<span class="sourceLineNo">210</span>      for (FamilyFiles ff : rm.getFamilyFilesList()) {<a name="line.210"></a>
+<span class="sourceLineNo">211</span>        // And each store file in that family<a name="line.211"></a>
+<span class="sourceLineNo">212</span>        for (StoreFile sf : ff.getStoreFilesList()) {<a name="line.212"></a>
+<span class="sourceLineNo">213</span>          Long valueOrNull = filesToUpdate.remove(sf.getName());<a name="line.213"></a>
+<span class="sourceLineNo">214</span>          if (valueOrNull != null) {<a name="line.214"></a>
+<span class="sourceLineNo">215</span>            // This storefile was recently archived, we should update this snapshot with its size<a name="line.215"></a>
+<span class="sourceLineNo">216</span>            snapshotSizeChanges.merge(snapshotName, valueOrNull, Long::sum);<a name="line.216"></a>
+<span class="sourceLineNo">217</span>          }<a name="line.217"></a>
+<span class="sourceLineNo">218</span>          // Short-circuit, if we have no more files that were archived, we don't need to iterate<a name="line.218"></a>
+<span class="sourceLineNo">219</span>          // over the rest of the snapshot.<a name="line.219"></a>
+<span class="sourceLineNo">220</span>          if (filesToUpdate.isEmpty()) {<a name="line.220"></a>
+<span class="sourceLineNo">221</span>            return;<a name="line.221"></a>
+<span class="sourceLineNo">222</span>          }<a name="line.222"></a>
+<span class="sourceLineNo">223</span>        }<a name="line.223"></a>
+<span class="sourceLineNo">224</span>      }<a name="line.224"></a>
+<span class="sourceLineNo">225</span>    }<a name="line.225"></a>
+<span class="sourceLineNo">226</span>  }<a name="line.226"></a>
+<span class="sourceLineNo">227</span><a name="line.227"></a>
+<span class="sourceLineNo">228</span>  /**<a name="line.228"></a>
+<span class="sourceLineNo">229</span>   * Reads the current size for each snapshot to update, generates a new update based on that value,<a name="line.229"></a>
+<span class="sourceLineNo">230</span>   * and then writes the new update.<a name="line.230"></a>
+<span class="sourceLineNo">231</span>   *<a name="line.231"></a>
+<span class="sourceLineNo">232</span>   * @param snapshotSizeChanges A map of snapshot name to size change<a name="line.232"></a>
+<span class="sourceLineNo">233</span>   */<a name="line.233"></a>
+<span class="sourceLineNo">234</span>  void persistSnapshotSizeChanges(Map&lt;String,Long&gt; snapshotSizeChanges) throws IOException {<a name="line.234"></a>
+<span class="sourceLineNo">235</span>    try (Table quotaTable = conn.getTable(QuotaTableUtil.QUOTA_TABLE_NAME)) {<a name="line.235"></a>
+<span class="sourceLineNo">236</span>      // Create a list (with a more typical ordering implied)<a name="line.236"></a>
+<span class="sourceLineNo">237</span>      final List&lt;Entry&lt;String,Long&gt;&gt; snapshotSizeEntries = new ArrayList&lt;&gt;(<a name="line.237"></a>
+<span class="sourceLineNo">238</span>          snapshotSizeChanges.entrySet());<a name="line.238"></a>
+<span class="sourceLineNo">239</span>      // Create the Gets for each snapshot we need to update<a name="line.239"></a>
+<span class="sourceLineNo">240</span>      final List&lt;Get&gt; snapshotSizeGets = snapshotSizeEntries.stream()<a name="line.240"></a>
+<span class="sourceLineNo">241</span>          .map((e) -&gt; QuotaTableUtil.makeGetForSnapshotSize(tn, e.getKey()))<a name="line.241"></a>
+<span class="sourceLineNo">242</span>          .collect(Collectors.toList());<a name="line.242"></a>
+<span class="sourceLineNo">243</span>      final Iterator&lt;Entry&lt;String,Long&gt;&gt; iterator = snapshotSizeEntries.iterator();<a name="line.243"></a>
+<span class="sourceLineNo">244</span>      // A List to store each Put we'll create from the Get's we retrieve<a name="line.244"></a>
+<span class="sourceLineNo">245</span>      final List&lt;Put&gt; updates = new ArrayList&lt;&gt;(snapshotSizeEntries.size());<a name="line.245"></a>
+<span class="sourceLineNo">246</span><a name="line.246"></a>
+<span class="sourceLineNo">247</span>      // TODO Push this down to the RegionServer with a coprocessor:<a name="line.247"></a>
+<span class="sourceLineNo">248</span>      //<a name="line.248"></a>
+<span class="sourceLineNo">249</span>      // We would really like to piggy-back on the row-lock already being grabbed<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      // to handle the update of the row in the quota table. However, because the value<a name="line.250"></a>
+<span class="sourceLineNo">251</span>      // is a serialized protobuf, the standard Increment API doesn't work for us. With a CP, we<a name="line.251"></a>
+<span class="sourceLineNo">252</span>      // can just send the size deltas to the RS and atomically update the serialized PB object<a name="line.252"></a>
+<span class="sourceLineNo">253</span>      // while relying on the row-lock for synchronization.<a name="line.253"></a>
+<span class="sourceLineNo">254</span>      //<a name="line.254"></a>
+<span class="sourceLineNo">255</span>      // Synchronizing on the namespace string is a "minor smell" but passable as this is<a name="line.255"></a>
+<span class="sourceLineNo">256</span>      // only invoked via a single caller (the active Master). Using the namespace name lets us<a name="line.256"></a>
+<span class="sourceLineNo">257</span>      // have some parallelism without worry of on caller seeing stale data from the quota table.<a name="line.257"></a>
+<span class="sourceLineNo">258</span>      synchronized (getLockForNamespace(tn.getNamespaceAsString())) {<a name="line.258"></a>
+<span class="sourceLineNo">259</span>        final Result[] existingSnapshotSizes = quotaTable.get(snapshotSizeGets);<a name="line.259"></a>
+<span class="sourceLineNo">260</span>        long totalSizeChange = 0;<a name="line.260"></a>
+<span class="sourceLineNo">261</span>        // Read the current size values (if they exist) to generate the new value<a name="line.261"></a>
+<span class="sourceLineNo">262</span>        for (Result result : existingSnapshotSizes) {<a name="line.262"></a>
+<span class="sourceLineNo">263</span>          Entry&lt;String,Long&gt; entry = iterator.next();<a name="line.263"></a>
+<span class="sourceLineNo">264</span>          String snapshot = entry.getKey();<a name="line.264"></a>
+<span class="sourceLineNo">265</span>          Long size = entry.getValue();<a name="line.265"></a>
+<span class="sourceLineNo">266</span>          // Track the total size change for the namespace this table belongs in<a name="line.266"></a>
+<span class="sourceLineNo">267</span>          totalSizeChange += size;<a name="line.267"></a>
+<span class="sourceLineNo">268</span>          // Get the size of the previous value (or zero)<a name="line.268"></a>
+<span class="sourceLineNo">269</span>          long previousSize = getSnapshotSizeFromResult(result);<a name="line.269"></a>
+<span class="sourceLineNo">270</span>          // Create an update. A file was archived from the table, so the table's size goes<a name="line.270"></a>
+<span class="sourceLineNo">271</span>          // down, but the snapshot's size goes up.<a name="line.271"></a>
+<span class="sourceLineNo">272</span>          updates.add(QuotaTableUtil.createPutForSnapshotSize(tn, snapshot, previousSize + size));<a name="line.272"></a>
+<span class="sourceLineNo">273</span>        }<a name="line.273"></a>
+<span class="sourceLineNo">274</span><a name="line.274"></a>
+<span class="sourceLineNo">275</span>        // Create an update for the summation of all snapshots in the namespace<a name="line.275"></a>
+<span class="sourceLineNo">276</span>        if (totalSizeChange != 0) {<a name="line.276"></a>
+<span class="sourceLineNo">277</span>          long previousSize = getPreviousNamespaceSnapshotSize(<a name="line.277"></a>
+<span class="sourceLineNo">278</span>              quotaTable, tn.getNamespaceAsString());<a name="line.278"></a>
+<span class="sourceLineNo">279</span>          updates.add(QuotaTableUtil.createPutForNamespaceSnapshotSize(<a name="line.279"></a>
+<span class="sourceLineNo">280</span>              tn.getNamespaceAsString(), previousSize + totalSizeChange));<a name="line.280"></a>
+<span class="sourceLineNo">281</span>        }<a name="line.281"></a>
+<span class="sourceLineNo">282</span><a name="line.282"></a>
+<span class="sourceLineNo">283</span>        // Send all of the quota table updates in one batch.<a name="line.283"></a>
+<span class="sourceLineNo">284</span>        List&lt;Object&gt; failures = new ArrayList&lt;&gt;();<a name="line.284"></a>
+<span class="sourceLineNo">285</span>        final Object[] results = new Object[updates.size()];<a name="line.285"></a>
+<span class="sourceLineNo">286</span>        quotaTable.batch(updates, results);<a name="line.286"></a>
+<span class="sourceLineNo">287</span>        for (Object result : results) {<a name="line.287"></a>
+<span class="sourceLineNo">288</span>          // A null result is an error condition (all RPC attempts failed)<a name="line.288"></a>
+<span class="sourceLineNo">289</span>          if (!(result instanceof Result)) {<a name="line.289"></a>
+<span class="sourceLineNo">290</span>            failures.add(result);<a name="line.290"></a>
+<span class="sourceLineNo">291</span>          }<a name="line.291"></a>
+<span class="sourceLineNo">292</span>        }<a name="line.292"></a>
+<span class="sourceLineNo">293</span>        // Propagate a failure if any updates failed<a name="line.293"></a>
+<span class="sourceLineNo">294</span>        if (!failures.isEmpty()) {<a name="line.294"></a>
+<span class="sourceLineNo">295</span>          throw new QuotaSnapshotSizeSerializationException(<a name="line.295"></a>
+<span class="sourceLineNo">

<TRUNCATED>
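
For readers following the quota change above, here is a condensed, illustrative sketch of the per-namespace lock pattern and the batch-failure check the diff relies on. The registry name NAMESPACE_LOCKS and the helper applyQuotaUpdates are assumptions for illustration, not the exact members in the patch:

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;
    import java.util.concurrent.ConcurrentHashMap;

    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.Table;

    public final class QuotaUpdateSketch {
      // One lock object per namespace: updates within a namespace are serialized,
      // while updates for different namespaces can proceed in parallel.
      private static final ConcurrentHashMap<String, Object> NAMESPACE_LOCKS =
          new ConcurrentHashMap<>();

      static Object getLockForNamespace(String namespace) {
        return NAMESPACE_LOCKS.computeIfAbsent(namespace, ns -> new Object());
      }

      // Write the quota-table updates in one batch and surface any failures,
      // mirroring the non-Result check in the diff above.
      static void applyQuotaUpdates(Table quotaTable, String namespace, List<Put> updates)
          throws IOException, InterruptedException {
        synchronized (getLockForNamespace(namespace)) {
          Object[] results = new Object[updates.size()];
          quotaTable.batch(updates, results);
          List<Object> failures = new ArrayList<>();
          for (Object result : results) {
            // An entry that is not a Result means all RPC attempts for it failed.
            if (!(result instanceof Result)) {
              failures.add(result);
            }
          }
          if (!failures.isEmpty()) {
            throw new IOException("Failed to write " + failures.size() + " quota update(s)");
          }
        }
      }
    }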

[25/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/apidocs/org/apache/hadoop/hbase/client/RowMutations.html
----------------------------------------------------------------------
diff --git a/apidocs/org/apache/hadoop/hbase/client/RowMutations.html b/apidocs/org/apache/hadoop/hbase/client/RowMutations.html
index 7142b04..46db553 100644
--- a/apidocs/org/apache/hadoop/hbase/client/RowMutations.html
+++ b/apidocs/org/apache/hadoop/hbase/client/RowMutations.html
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 <hr>
 <br>
 <pre>@InterfaceAudience.Public
-public class <a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.41">RowMutations</a>
+public class <a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.42">RowMutations</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="../../../../../org/apache/hadoop/hbase/client/Row.html" title="interface in org.apache.hadoop.hbase.client">Row</a></pre>
 <div class="block">Performs multiple mutations atomically on a single row.
@@ -284,7 +284,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/client/Row.html" titl
 <ul class="blockList">
 <li class="blockList">
 <h4>RowMutations</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.60">RowMutations</a>(byte[]&nbsp;row)</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.61">RowMutations</a>(byte[]&nbsp;row)</pre>
 </li>
 </ul>
 <a name="RowMutations-byte:A-int-">
@@ -293,7 +293,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/client/Row.html" titl
 <ul class="blockListLast">
 <li class="blockList">
 <h4>RowMutations</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.68">RowMutations</a>(byte[]&nbsp;row,
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.69">RowMutations</a>(byte[]&nbsp;row,
                     int&nbsp;initialCapacity)</pre>
 <div class="block">Create an atomic mutation for the specified row.</div>
 <dl>
@@ -317,7 +317,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/client/Row.html" titl
 <ul class="blockList">
 <li class="blockList">
 <h4>of</h4>
-<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/RowMutations.html" title="class in org.apache.hadoop.hbase.client">RowMutations</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.49">of</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;? extends <a href="../../../../../org/apache/hadoop/hbase/client/Mutation.html" title="class in org.apache.hadoop.hbase.client">Mutation</a>&gt;&nbsp;mutations)
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/RowMutations.html" title="class in org.apache.hadoop.hbase.client">RowMutations</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.50">of</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;? extends <a href="../../../../../org/apache/hadoop/hbase/client/Mutation.html" title="class in org.apache.hadoop.hbase.client">Mutation</a>&gt;&nbsp;mutations)
                        throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Create a <a href="../../../../../org/apache/hadoop/hbase/client/RowMutations.html" title="class in org.apache.hadoop.hbase.client"><code>RowMutations</code></a> with the specified mutations.</div>
 <dl>
@@ -337,7 +337,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/client/Row.html" titl
 <li class="blockList">
 <h4>add</h4>
 <pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true" title="class or interface in java.lang">@Deprecated</a>
-public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.85">add</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Put.html" title="class in org.apache.hadoop.hbase.client">Put</a>&nbsp;p)
+public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.86">add</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Put.html" title="class in org.apache.hadoop.hbase.client">Put</a>&nbsp;p)
                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="deprecatedLabel">Deprecated.</span>&nbsp;<span class="deprecationComment">since 2.0 version and will be removed in 3.0 version.
              use <a href="../../../../../org/apache/hadoop/hbase/client/RowMutations.html#add-org.apache.hadoop.hbase.client.Mutation-"><code>add(Mutation)</code></a></span></div>
@@ -357,7 +357,7 @@ public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/c
 <li class="blockList">
 <h4>add</h4>
 <pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true" title="class or interface in java.lang">@Deprecated</a>
-public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.97">add</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Delete.html" title="class in org.apache.hadoop.hbase.client">Delete</a>&nbsp;d)
+public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.98">add</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Delete.html" title="class in org.apache.hadoop.hbase.client">Delete</a>&nbsp;d)
                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="deprecatedLabel">Deprecated.</span>&nbsp;<span class="deprecationComment">since 2.0 version and will be removed in 3.0 version.
              use <a href="../../../../../org/apache/hadoop/hbase/client/RowMutations.html#add-org.apache.hadoop.hbase.client.Mutation-"><code>add(Mutation)</code></a></span></div>
@@ -376,7 +376,7 @@ public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/c
 <ul class="blockList">
 <li class="blockList">
 <h4>add</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/RowMutations.html" title="class in org.apache.hadoop.hbase.client">RowMutations</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.107">add</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Mutation.html" title="class in org.apache.hadoop.hbase.client">Mutation</a>&nbsp;mutation)
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/RowMutations.html" title="class in org.apache.hadoop.hbase.client">RowMutations</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.108">add</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Mutation.html" title="class in org.apache.hadoop.hbase.client">Mutation</a>&nbsp;mutation)
                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Currently only supports <a href="../../../../../org/apache/hadoop/hbase/client/Put.html" title="class in org.apache.hadoop.hbase.client"><code>Put</code></a> and <a href="../../../../../org/apache/hadoop/hbase/client/Delete.html" title="class in org.apache.hadoop.hbase.client"><code>Delete</code></a> mutations.</div>
 <dl>
@@ -393,7 +393,7 @@ public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/c
 <ul class="blockList">
 <li class="blockList">
 <h4>add</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/RowMutations.html" title="class in org.apache.hadoop.hbase.client">RowMutations</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.117">add</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;? extends <a href="../../../../../org/apache/hadoop/hbase/client/Mutation.html" title="class in org.apache.hadoop.hbase.client">Mutation</a>&gt;&nbsp;mutations)
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/RowMutations.html" title="class in org.apache.hadoop.hbase.client">RowMutations</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.118">add</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;? extends <a href="../../../../../org/apache/hadoop/hbase/client/Mutation.html" title="class in org.apache.hadoop.hbase.client">Mutation</a>&gt;&nbsp;mutations)
                  throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Currently only supports <a href="../../../../../org/apache/hadoop/hbase/client/Put.html" title="class in org.apache.hadoop.hbase.client"><code>Put</code></a> and <a href="../../../../../org/apache/hadoop/hbase/client/Delete.html" title="class in org.apache.hadoop.hbase.client"><code>Delete</code></a> mutations.</div>
 <dl>
@@ -411,7 +411,7 @@ public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/c
 <li class="blockList">
 <h4>compareTo</h4>
 <pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true" title="class or interface in java.lang">@Deprecated</a>
-public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.135">compareTo</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Row.html" title="interface in org.apache.hadoop.hbase.client">Row</a>&nbsp;i)</pre>
+public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.136">compareTo</a>(<a href="../../../../../org/apache/hadoop/hbase/client/Row.html" title="interface in org.apache.hadoop.hbase.client">Row</a>&nbsp;i)</pre>
 <div class="block"><span class="deprecatedLabel">Deprecated.</span>&nbsp;<span class="deprecationComment">As of release 2.0.0, this will be removed in HBase 3.0.0.
              Use <a href="../../../../../org/apache/hadoop/hbase/client/Row.html#COMPARATOR"><code>Row.COMPARATOR</code></a> instead</span></div>
 <dl>
@@ -429,7 +429,7 @@ public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/cl
 <li class="blockList">
 <h4>equals</h4>
 <pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true" title="class or interface in java.lang">@Deprecated</a>
-public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.145">equals</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;obj)</pre>
+public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.146">equals</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>&nbsp;obj)</pre>
 <div class="block"><span class="deprecatedLabel">Deprecated.</span>&nbsp;<span class="deprecationComment">As of release 2.0.0, this will be removed in HBase 3.0.0.
              No replacement</span></div>
 <dl>
@@ -445,7 +445,7 @@ public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbas
 <li class="blockList">
 <h4>hashCode</h4>
 <pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true" title="class or interface in java.lang">@Deprecated</a>
-public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.160">hashCode</a>()</pre>
+public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.161">hashCode</a>()</pre>
 <div class="block"><span class="deprecatedLabel">Deprecated.</span>&nbsp;<span class="deprecationComment">As of release 2.0.0, this will be removed in HBase 3.0.0.
              No replacement</span></div>
 <dl>
@@ -460,7 +460,7 @@ public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/cl
 <ul class="blockList">
 <li class="blockList">
 <h4>getRow</h4>
-<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.165">getRow</a>()</pre>
+<pre>public&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.166">getRow</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/client/Row.html#getRow--">getRow</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/client/Row.html" title="interface in org.apache.hadoop.hbase.client">Row</a></code></dd>
@@ -475,7 +475,7 @@ public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/cl
 <ul class="blockList">
 <li class="blockList">
 <h4>getMutations</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/Mutation.html" title="class in org.apache.hadoop.hbase.client">Mutation</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.172">getMutations</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/Mutation.html" title="class in org.apache.hadoop.hbase.client">Mutation</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.173">getMutations</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>An unmodifiable list of the current mutations.</dd>
@@ -488,7 +488,7 @@ public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/cl
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getMaxPriority</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.176">getMaxPriority</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/client/RowMutations.html#line.177">getMaxPriority</a>()</pre>
 </li>
 </ul>
 </li>
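
For context, a minimal usage sketch of the API documented above, assuming an open org.apache.hadoop.hbase.client.Table named table and a column family "cf" (both illustrative, not taken from the patch):

    import java.io.IOException;
    import java.util.Arrays;

    import org.apache.hadoop.hbase.client.Delete;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.client.RowMutations;
    import org.apache.hadoop.hbase.client.Table;
    import org.apache.hadoop.hbase.util.Bytes;

    public class RowMutationsUsageSketch {
      static void updateAtomically(Table table) throws IOException {
        byte[] row = Bytes.toBytes("row-1");
        byte[] cf = Bytes.toBytes("cf");

        Put put = new Put(row).addColumn(cf, Bytes.toBytes("a"), Bytes.toBytes("1"));
        Delete delete = new Delete(row).addColumns(cf, Bytes.toBytes("b"));

        // of() validates that every mutation targets the same row.
        RowMutations rm = RowMutations.of(Arrays.asList(put, delete));

        // Applied atomically on the single row, in the order added.
        table.mutateRow(rm);
      }
    }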

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/apidocs/src-html/org/apache/hadoop/hbase/client/RowMutations.html
----------------------------------------------------------------------
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/RowMutations.html b/apidocs/src-html/org/apache/hadoop/hbase/client/RowMutations.html
index 76c6859..df06090 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/RowMutations.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/RowMutations.html
@@ -31,164 +31,165 @@
 <span class="sourceLineNo">023</span>import java.util.Collections;<a name="line.23"></a>
 <span class="sourceLineNo">024</span>import java.util.List;<a name="line.24"></a>
 <span class="sourceLineNo">025</span><a name="line.25"></a>
-<span class="sourceLineNo">026</span>import org.apache.commons.collections.CollectionUtils;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.28"></a>
-<span class="sourceLineNo">029</span><a name="line.29"></a>
-<span class="sourceLineNo">030</span>/**<a name="line.30"></a>
-<span class="sourceLineNo">031</span> * Performs multiple mutations atomically on a single row.<a name="line.31"></a>
-<span class="sourceLineNo">032</span> * Currently {@link Put} and {@link Delete} are supported.<a name="line.32"></a>
-<span class="sourceLineNo">033</span> *<a name="line.33"></a>
-<span class="sourceLineNo">034</span> * The mutations are performed in the order in which they<a name="line.34"></a>
-<span class="sourceLineNo">035</span> * were added.<a name="line.35"></a>
-<span class="sourceLineNo">036</span> *<a name="line.36"></a>
-<span class="sourceLineNo">037</span> * &lt;p&gt;We compare and equate mutations based off their row so be careful putting RowMutations<a name="line.37"></a>
-<span class="sourceLineNo">038</span> * into Sets or using them as keys in Maps.<a name="line.38"></a>
-<span class="sourceLineNo">039</span> */<a name="line.39"></a>
-<span class="sourceLineNo">040</span>@InterfaceAudience.Public<a name="line.40"></a>
-<span class="sourceLineNo">041</span>public class RowMutations implements Row {<a name="line.41"></a>
-<span class="sourceLineNo">042</span><a name="line.42"></a>
-<span class="sourceLineNo">043</span>  /**<a name="line.43"></a>
-<span class="sourceLineNo">044</span>   * Create a {@link RowMutations} with the specified mutations.<a name="line.44"></a>
-<span class="sourceLineNo">045</span>   * @param mutations the mutations to send<a name="line.45"></a>
-<span class="sourceLineNo">046</span>   * @return RowMutations<a name="line.46"></a>
-<span class="sourceLineNo">047</span>   * @throws IOException if any row in mutations is different to another<a name="line.47"></a>
-<span class="sourceLineNo">048</span>   */<a name="line.48"></a>
-<span class="sourceLineNo">049</span>  public static RowMutations of(List&lt;? extends Mutation&gt; mutations) throws IOException {<a name="line.49"></a>
-<span class="sourceLineNo">050</span>    if (CollectionUtils.isEmpty(mutations)) {<a name="line.50"></a>
-<span class="sourceLineNo">051</span>      throw new IllegalArgumentException("Cannot instantiate a RowMutations by empty list");<a name="line.51"></a>
-<span class="sourceLineNo">052</span>    }<a name="line.52"></a>
-<span class="sourceLineNo">053</span>    return new RowMutations(mutations.get(0).getRow(), mutations.size())<a name="line.53"></a>
-<span class="sourceLineNo">054</span>        .add(mutations);<a name="line.54"></a>
-<span class="sourceLineNo">055</span>  }<a name="line.55"></a>
-<span class="sourceLineNo">056</span><a name="line.56"></a>
-<span class="sourceLineNo">057</span>  private final List&lt;Mutation&gt; mutations;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>  private final byte [] row;<a name="line.58"></a>
-<span class="sourceLineNo">059</span><a name="line.59"></a>
-<span class="sourceLineNo">060</span>  public RowMutations(byte [] row) {<a name="line.60"></a>
-<span class="sourceLineNo">061</span>    this(row, -1);<a name="line.61"></a>
-<span class="sourceLineNo">062</span>  }<a name="line.62"></a>
-<span class="sourceLineNo">063</span>  /**<a name="line.63"></a>
-<span class="sourceLineNo">064</span>   * Create an atomic mutation for the specified row.<a name="line.64"></a>
-<span class="sourceLineNo">065</span>   * @param row row key<a name="line.65"></a>
-<span class="sourceLineNo">066</span>   * @param initialCapacity the initial capacity of the RowMutations<a name="line.66"></a>
-<span class="sourceLineNo">067</span>   */<a name="line.67"></a>
-<span class="sourceLineNo">068</span>  public RowMutations(byte [] row, int initialCapacity) {<a name="line.68"></a>
-<span class="sourceLineNo">069</span>    this.row = Bytes.copy(Mutation.checkRow(row));<a name="line.69"></a>
-<span class="sourceLineNo">070</span>    if (initialCapacity &lt;= 0) {<a name="line.70"></a>
-<span class="sourceLineNo">071</span>      this.mutations = new ArrayList&lt;&gt;();<a name="line.71"></a>
-<span class="sourceLineNo">072</span>    } else {<a name="line.72"></a>
-<span class="sourceLineNo">073</span>      this.mutations = new ArrayList&lt;&gt;(initialCapacity);<a name="line.73"></a>
-<span class="sourceLineNo">074</span>    }<a name="line.74"></a>
-<span class="sourceLineNo">075</span>  }<a name="line.75"></a>
-<span class="sourceLineNo">076</span><a name="line.76"></a>
-<span class="sourceLineNo">077</span>  /**<a name="line.77"></a>
-<span class="sourceLineNo">078</span>   * Add a {@link Put} operation to the list of mutations<a name="line.78"></a>
-<span class="sourceLineNo">079</span>   * @param p The {@link Put} to add<a name="line.79"></a>
-<span class="sourceLineNo">080</span>   * @throws IOException if the row of added mutation doesn't match the original row<a name="line.80"></a>
-<span class="sourceLineNo">081</span>   * @deprecated since 2.0 version and will be removed in 3.0 version.<a name="line.81"></a>
-<span class="sourceLineNo">082</span>   *             use {@link #add(Mutation)}<a name="line.82"></a>
-<span class="sourceLineNo">083</span>   */<a name="line.83"></a>
-<span class="sourceLineNo">084</span>  @Deprecated<a name="line.84"></a>
-<span class="sourceLineNo">085</span>  public void add(Put p) throws IOException {<a name="line.85"></a>
-<span class="sourceLineNo">086</span>    add((Mutation) p);<a name="line.86"></a>
-<span class="sourceLineNo">087</span>  }<a name="line.87"></a>
-<span class="sourceLineNo">088</span><a name="line.88"></a>
-<span class="sourceLineNo">089</span>  /**<a name="line.89"></a>
-<span class="sourceLineNo">090</span>   * Add a {@link Delete} operation to the list of mutations<a name="line.90"></a>
-<span class="sourceLineNo">091</span>   * @param d The {@link Delete} to add<a name="line.91"></a>
-<span class="sourceLineNo">092</span>   * @throws IOException if the row of added mutation doesn't match the original row<a name="line.92"></a>
-<span class="sourceLineNo">093</span>   * @deprecated since 2.0 version and will be removed in 3.0 version.<a name="line.93"></a>
-<span class="sourceLineNo">094</span>   *             use {@link #add(Mutation)}<a name="line.94"></a>
-<span class="sourceLineNo">095</span>   */<a name="line.95"></a>
-<span class="sourceLineNo">096</span>  @Deprecated<a name="line.96"></a>
-<span class="sourceLineNo">097</span>  public void add(Delete d) throws IOException {<a name="line.97"></a>
-<span class="sourceLineNo">098</span>    add((Mutation) d);<a name="line.98"></a>
-<span class="sourceLineNo">099</span>  }<a name="line.99"></a>
-<span class="sourceLineNo">100</span><a name="line.100"></a>
-<span class="sourceLineNo">101</span>  /**<a name="line.101"></a>
-<span class="sourceLineNo">102</span>   * Currently only supports {@link Put} and {@link Delete} mutations.<a name="line.102"></a>
-<span class="sourceLineNo">103</span>   *<a name="line.103"></a>
-<span class="sourceLineNo">104</span>   * @param mutation The data to send.<a name="line.104"></a>
-<span class="sourceLineNo">105</span>   * @throws IOException if the row of added mutation doesn't match the original row<a name="line.105"></a>
-<span class="sourceLineNo">106</span>   */<a name="line.106"></a>
-<span class="sourceLineNo">107</span>  public RowMutations add(Mutation mutation) throws IOException {<a name="line.107"></a>
-<span class="sourceLineNo">108</span>    return add(Collections.singletonList(mutation));<a name="line.108"></a>
-<span class="sourceLineNo">109</span>  }<a name="line.109"></a>
-<span class="sourceLineNo">110</span><a name="line.110"></a>
-<span class="sourceLineNo">111</span>  /**<a name="line.111"></a>
-<span class="sourceLineNo">112</span>   * Currently only supports {@link Put} and {@link Delete} mutations.<a name="line.112"></a>
-<span class="sourceLineNo">113</span>   *<a name="line.113"></a>
-<span class="sourceLineNo">114</span>   * @param mutations The data to send.<a name="line.114"></a>
-<span class="sourceLineNo">115</span>   * @throws IOException if the row of added mutation doesn't match the original row<a name="line.115"></a>
-<span class="sourceLineNo">116</span>   */<a name="line.116"></a>
-<span class="sourceLineNo">117</span>  public RowMutations add(List&lt;? extends Mutation&gt; mutations) throws IOException {<a name="line.117"></a>
-<span class="sourceLineNo">118</span>    for (Mutation mutation : mutations) {<a name="line.118"></a>
-<span class="sourceLineNo">119</span>      if (!Bytes.equals(row, mutation.getRow())) {<a name="line.119"></a>
-<span class="sourceLineNo">120</span>        throw new WrongRowIOException("The row in the recently added Put/Delete &lt;" +<a name="line.120"></a>
-<span class="sourceLineNo">121</span>          Bytes.toStringBinary(mutation.getRow()) + "&gt; doesn't match the original one &lt;" +<a name="line.121"></a>
-<span class="sourceLineNo">122</span>          Bytes.toStringBinary(this.row) + "&gt;");<a name="line.122"></a>
-<span class="sourceLineNo">123</span>      }<a name="line.123"></a>
-<span class="sourceLineNo">124</span>    }<a name="line.124"></a>
-<span class="sourceLineNo">125</span>    this.mutations.addAll(mutations);<a name="line.125"></a>
-<span class="sourceLineNo">126</span>    return this;<a name="line.126"></a>
-<span class="sourceLineNo">127</span>  }<a name="line.127"></a>
-<span class="sourceLineNo">128</span><a name="line.128"></a>
-<span class="sourceLineNo">129</span>  /**<a name="line.129"></a>
-<span class="sourceLineNo">130</span>   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.<a name="line.130"></a>
-<span class="sourceLineNo">131</span>   *             Use {@link Row#COMPARATOR} instead<a name="line.131"></a>
-<span class="sourceLineNo">132</span>   */<a name="line.132"></a>
-<span class="sourceLineNo">133</span>  @Deprecated<a name="line.133"></a>
-<span class="sourceLineNo">134</span>  @Override<a name="line.134"></a>
-<span class="sourceLineNo">135</span>  public int compareTo(Row i) {<a name="line.135"></a>
-<span class="sourceLineNo">136</span>    return Bytes.compareTo(this.getRow(), i.getRow());<a name="line.136"></a>
-<span class="sourceLineNo">137</span>  }<a name="line.137"></a>
-<span class="sourceLineNo">138</span><a name="line.138"></a>
-<span class="sourceLineNo">139</span>  /**<a name="line.139"></a>
-<span class="sourceLineNo">140</span>   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.<a name="line.140"></a>
-<span class="sourceLineNo">141</span>   *             No replacement<a name="line.141"></a>
-<span class="sourceLineNo">142</span>   */<a name="line.142"></a>
-<span class="sourceLineNo">143</span>  @Deprecated<a name="line.143"></a>
-<span class="sourceLineNo">144</span>  @Override<a name="line.144"></a>
-<span class="sourceLineNo">145</span>  public boolean equals(Object obj) {<a name="line.145"></a>
-<span class="sourceLineNo">146</span>    if (obj == this) return true;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>    if (obj instanceof RowMutations) {<a name="line.147"></a>
-<span class="sourceLineNo">148</span>      RowMutations other = (RowMutations)obj;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>      return compareTo(other) == 0;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>    }<a name="line.150"></a>
-<span class="sourceLineNo">151</span>    return false;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>  }<a name="line.152"></a>
-<span class="sourceLineNo">153</span><a name="line.153"></a>
-<span class="sourceLineNo">154</span>  /**<a name="line.154"></a>
-<span class="sourceLineNo">155</span>   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.<a name="line.155"></a>
-<span class="sourceLineNo">156</span>   *             No replacement<a name="line.156"></a>
-<span class="sourceLineNo">157</span>   */<a name="line.157"></a>
-<span class="sourceLineNo">158</span>  @Deprecated<a name="line.158"></a>
-<span class="sourceLineNo">159</span>  @Override<a name="line.159"></a>
-<span class="sourceLineNo">160</span>  public int hashCode(){<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    return Arrays.hashCode(row);<a name="line.161"></a>
-<span class="sourceLineNo">162</span>  }<a name="line.162"></a>
-<span class="sourceLineNo">163</span><a name="line.163"></a>
-<span class="sourceLineNo">164</span>  @Override<a name="line.164"></a>
-<span class="sourceLineNo">165</span>  public byte[] getRow() {<a name="line.165"></a>
-<span class="sourceLineNo">166</span>    return row;<a name="line.166"></a>
-<span class="sourceLineNo">167</span>  }<a name="line.167"></a>
-<span class="sourceLineNo">168</span><a name="line.168"></a>
-<span class="sourceLineNo">169</span>  /**<a name="line.169"></a>
-<span class="sourceLineNo">170</span>   * @return An unmodifiable list of the current mutations.<a name="line.170"></a>
-<span class="sourceLineNo">171</span>   */<a name="line.171"></a>
-<span class="sourceLineNo">172</span>  public List&lt;Mutation&gt; getMutations() {<a name="line.172"></a>
-<span class="sourceLineNo">173</span>    return Collections.unmodifiableList(mutations);<a name="line.173"></a>
-<span class="sourceLineNo">174</span>  }<a name="line.174"></a>
-<span class="sourceLineNo">175</span><a name="line.175"></a>
-<span class="sourceLineNo">176</span>  public int getMaxPriority() {<a name="line.176"></a>
-<span class="sourceLineNo">177</span>    int maxPriority = Integer.MIN_VALUE;<a name="line.177"></a>
-<span class="sourceLineNo">178</span>    for (Mutation mutation : mutations) {<a name="line.178"></a>
-<span class="sourceLineNo">179</span>      maxPriority = Math.max(maxPriority, mutation.getPriority());<a name="line.179"></a>
-<span class="sourceLineNo">180</span>    }<a name="line.180"></a>
-<span class="sourceLineNo">181</span>    return maxPriority;<a name="line.181"></a>
-<span class="sourceLineNo">182</span>  }<a name="line.182"></a>
-<span class="sourceLineNo">183</span>}<a name="line.183"></a>
+<span class="sourceLineNo">026</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.26"></a>
+<span class="sourceLineNo">027</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.27"></a>
+<span class="sourceLineNo">028</span><a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.29"></a>
+<span class="sourceLineNo">030</span><a name="line.30"></a>
+<span class="sourceLineNo">031</span>/**<a name="line.31"></a>
+<span class="sourceLineNo">032</span> * Performs multiple mutations atomically on a single row.<a name="line.32"></a>
+<span class="sourceLineNo">033</span> * Currently {@link Put} and {@link Delete} are supported.<a name="line.33"></a>
+<span class="sourceLineNo">034</span> *<a name="line.34"></a>
+<span class="sourceLineNo">035</span> * The mutations are performed in the order in which they<a name="line.35"></a>
+<span class="sourceLineNo">036</span> * were added.<a name="line.36"></a>
+<span class="sourceLineNo">037</span> *<a name="line.37"></a>
+<span class="sourceLineNo">038</span> * &lt;p&gt;We compare and equate mutations based off their row so be careful putting RowMutations<a name="line.38"></a>
+<span class="sourceLineNo">039</span> * into Sets or using them as keys in Maps.<a name="line.39"></a>
+<span class="sourceLineNo">040</span> */<a name="line.40"></a>
+<span class="sourceLineNo">041</span>@InterfaceAudience.Public<a name="line.41"></a>
+<span class="sourceLineNo">042</span>public class RowMutations implements Row {<a name="line.42"></a>
+<span class="sourceLineNo">043</span><a name="line.43"></a>
+<span class="sourceLineNo">044</span>  /**<a name="line.44"></a>
+<span class="sourceLineNo">045</span>   * Create a {@link RowMutations} with the specified mutations.<a name="line.45"></a>
+<span class="sourceLineNo">046</span>   * @param mutations the mutations to send<a name="line.46"></a>
+<span class="sourceLineNo">047</span>   * @return RowMutations<a name="line.47"></a>
+<span class="sourceLineNo">048</span>   * @throws IOException if any row in mutations is different to another<a name="line.48"></a>
+<span class="sourceLineNo">049</span>   */<a name="line.49"></a>
+<span class="sourceLineNo">050</span>  public static RowMutations of(List&lt;? extends Mutation&gt; mutations) throws IOException {<a name="line.50"></a>
+<span class="sourceLineNo">051</span>    if (CollectionUtils.isEmpty(mutations)) {<a name="line.51"></a>
+<span class="sourceLineNo">052</span>      throw new IllegalArgumentException("Cannot instantiate a RowMutations by empty list");<a name="line.52"></a>
+<span class="sourceLineNo">053</span>    }<a name="line.53"></a>
+<span class="sourceLineNo">054</span>    return new RowMutations(mutations.get(0).getRow(), mutations.size())<a name="line.54"></a>
+<span class="sourceLineNo">055</span>        .add(mutations);<a name="line.55"></a>
+<span class="sourceLineNo">056</span>  }<a name="line.56"></a>
+<span class="sourceLineNo">057</span><a name="line.57"></a>
+<span class="sourceLineNo">058</span>  private final List&lt;Mutation&gt; mutations;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>  private final byte [] row;<a name="line.59"></a>
+<span class="sourceLineNo">060</span><a name="line.60"></a>
+<span class="sourceLineNo">061</span>  public RowMutations(byte [] row) {<a name="line.61"></a>
+<span class="sourceLineNo">062</span>    this(row, -1);<a name="line.62"></a>
+<span class="sourceLineNo">063</span>  }<a name="line.63"></a>
+<span class="sourceLineNo">064</span>  /**<a name="line.64"></a>
+<span class="sourceLineNo">065</span>   * Create an atomic mutation for the specified row.<a name="line.65"></a>
+<span class="sourceLineNo">066</span>   * @param row row key<a name="line.66"></a>
+<span class="sourceLineNo">067</span>   * @param initialCapacity the initial capacity of the RowMutations<a name="line.67"></a>
+<span class="sourceLineNo">068</span>   */<a name="line.68"></a>
+<span class="sourceLineNo">069</span>  public RowMutations(byte [] row, int initialCapacity) {<a name="line.69"></a>
+<span class="sourceLineNo">070</span>    this.row = Bytes.copy(Mutation.checkRow(row));<a name="line.70"></a>
+<span class="sourceLineNo">071</span>    if (initialCapacity &lt;= 0) {<a name="line.71"></a>
+<span class="sourceLineNo">072</span>      this.mutations = new ArrayList&lt;&gt;();<a name="line.72"></a>
+<span class="sourceLineNo">073</span>    } else {<a name="line.73"></a>
+<span class="sourceLineNo">074</span>      this.mutations = new ArrayList&lt;&gt;(initialCapacity);<a name="line.74"></a>
+<span class="sourceLineNo">075</span>    }<a name="line.75"></a>
+<span class="sourceLineNo">076</span>  }<a name="line.76"></a>
+<span class="sourceLineNo">077</span><a name="line.77"></a>
+<span class="sourceLineNo">078</span>  /**<a name="line.78"></a>
+<span class="sourceLineNo">079</span>   * Add a {@link Put} operation to the list of mutations<a name="line.79"></a>
+<span class="sourceLineNo">080</span>   * @param p The {@link Put} to add<a name="line.80"></a>
+<span class="sourceLineNo">081</span>   * @throws IOException if the row of added mutation doesn't match the original row<a name="line.81"></a>
+<span class="sourceLineNo">082</span>   * @deprecated since 2.0 version and will be removed in 3.0 version.<a name="line.82"></a>
+<span class="sourceLineNo">083</span>   *             use {@link #add(Mutation)}<a name="line.83"></a>
+<span class="sourceLineNo">084</span>   */<a name="line.84"></a>
+<span class="sourceLineNo">085</span>  @Deprecated<a name="line.85"></a>
+<span class="sourceLineNo">086</span>  public void add(Put p) throws IOException {<a name="line.86"></a>
+<span class="sourceLineNo">087</span>    add((Mutation) p);<a name="line.87"></a>
+<span class="sourceLineNo">088</span>  }<a name="line.88"></a>
+<span class="sourceLineNo">089</span><a name="line.89"></a>
+<span class="sourceLineNo">090</span>  /**<a name="line.90"></a>
+<span class="sourceLineNo">091</span>   * Add a {@link Delete} operation to the list of mutations<a name="line.91"></a>
+<span class="sourceLineNo">092</span>   * @param d The {@link Delete} to add<a name="line.92"></a>
+<span class="sourceLineNo">093</span>   * @throws IOException if the row of added mutation doesn't match the original row<a name="line.93"></a>
+<span class="sourceLineNo">094</span>   * @deprecated since 2.0 version and will be removed in 3.0 version.<a name="line.94"></a>
+<span class="sourceLineNo">095</span>   *             use {@link #add(Mutation)}<a name="line.95"></a>
+<span class="sourceLineNo">096</span>   */<a name="line.96"></a>
+<span class="sourceLineNo">097</span>  @Deprecated<a name="line.97"></a>
+<span class="sourceLineNo">098</span>  public void add(Delete d) throws IOException {<a name="line.98"></a>
+<span class="sourceLineNo">099</span>    add((Mutation) d);<a name="line.99"></a>
+<span class="sourceLineNo">100</span>  }<a name="line.100"></a>
+<span class="sourceLineNo">101</span><a name="line.101"></a>
+<span class="sourceLineNo">102</span>  /**<a name="line.102"></a>
+<span class="sourceLineNo">103</span>   * Currently only supports {@link Put} and {@link Delete} mutations.<a name="line.103"></a>
+<span class="sourceLineNo">104</span>   *<a name="line.104"></a>
+<span class="sourceLineNo">105</span>   * @param mutation The data to send.<a name="line.105"></a>
+<span class="sourceLineNo">106</span>   * @throws IOException if the row of added mutation doesn't match the original row<a name="line.106"></a>
+<span class="sourceLineNo">107</span>   */<a name="line.107"></a>
+<span class="sourceLineNo">108</span>  public RowMutations add(Mutation mutation) throws IOException {<a name="line.108"></a>
+<span class="sourceLineNo">109</span>    return add(Collections.singletonList(mutation));<a name="line.109"></a>
+<span class="sourceLineNo">110</span>  }<a name="line.110"></a>
+<span class="sourceLineNo">111</span><a name="line.111"></a>
+<span class="sourceLineNo">112</span>  /**<a name="line.112"></a>
+<span class="sourceLineNo">113</span>   * Currently only supports {@link Put} and {@link Delete} mutations.<a name="line.113"></a>
+<span class="sourceLineNo">114</span>   *<a name="line.114"></a>
+<span class="sourceLineNo">115</span>   * @param mutations The data to send.<a name="line.115"></a>
+<span class="sourceLineNo">116</span>   * @throws IOException if the row of added mutation doesn't match the original row<a name="line.116"></a>
+<span class="sourceLineNo">117</span>   */<a name="line.117"></a>
+<span class="sourceLineNo">118</span>  public RowMutations add(List&lt;? extends Mutation&gt; mutations) throws IOException {<a name="line.118"></a>
+<span class="sourceLineNo">119</span>    for (Mutation mutation : mutations) {<a name="line.119"></a>
+<span class="sourceLineNo">120</span>      if (!Bytes.equals(row, mutation.getRow())) {<a name="line.120"></a>
+<span class="sourceLineNo">121</span>        throw new WrongRowIOException("The row in the recently added Put/Delete &lt;" +<a name="line.121"></a>
+<span class="sourceLineNo">122</span>          Bytes.toStringBinary(mutation.getRow()) + "&gt; doesn't match the original one &lt;" +<a name="line.122"></a>
+<span class="sourceLineNo">123</span>          Bytes.toStringBinary(this.row) + "&gt;");<a name="line.123"></a>
+<span class="sourceLineNo">124</span>      }<a name="line.124"></a>
+<span class="sourceLineNo">125</span>    }<a name="line.125"></a>
+<span class="sourceLineNo">126</span>    this.mutations.addAll(mutations);<a name="line.126"></a>
+<span class="sourceLineNo">127</span>    return this;<a name="line.127"></a>
+<span class="sourceLineNo">128</span>  }<a name="line.128"></a>
+<span class="sourceLineNo">129</span><a name="line.129"></a>
+<span class="sourceLineNo">130</span>  /**<a name="line.130"></a>
+<span class="sourceLineNo">131</span>   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.<a name="line.131"></a>
+<span class="sourceLineNo">132</span>   *             Use {@link Row#COMPARATOR} instead<a name="line.132"></a>
+<span class="sourceLineNo">133</span>   */<a name="line.133"></a>
+<span class="sourceLineNo">134</span>  @Deprecated<a name="line.134"></a>
+<span class="sourceLineNo">135</span>  @Override<a name="line.135"></a>
+<span class="sourceLineNo">136</span>  public int compareTo(Row i) {<a name="line.136"></a>
+<span class="sourceLineNo">137</span>    return Bytes.compareTo(this.getRow(), i.getRow());<a name="line.137"></a>
+<span class="sourceLineNo">138</span>  }<a name="line.138"></a>
+<span class="sourceLineNo">139</span><a name="line.139"></a>
+<span class="sourceLineNo">140</span>  /**<a name="line.140"></a>
+<span class="sourceLineNo">141</span>   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.<a name="line.141"></a>
+<span class="sourceLineNo">142</span>   *             No replacement<a name="line.142"></a>
+<span class="sourceLineNo">143</span>   */<a name="line.143"></a>
+<span class="sourceLineNo">144</span>  @Deprecated<a name="line.144"></a>
+<span class="sourceLineNo">145</span>  @Override<a name="line.145"></a>
+<span class="sourceLineNo">146</span>  public boolean equals(Object obj) {<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    if (obj == this) return true;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>    if (obj instanceof RowMutations) {<a name="line.148"></a>
+<span class="sourceLineNo">149</span>      RowMutations other = (RowMutations)obj;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>      return compareTo(other) == 0;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>    }<a name="line.151"></a>
+<span class="sourceLineNo">152</span>    return false;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>  }<a name="line.153"></a>
+<span class="sourceLineNo">154</span><a name="line.154"></a>
+<span class="sourceLineNo">155</span>  /**<a name="line.155"></a>
+<span class="sourceLineNo">156</span>   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.<a name="line.156"></a>
+<span class="sourceLineNo">157</span>   *             No replacement<a name="line.157"></a>
+<span class="sourceLineNo">158</span>   */<a name="line.158"></a>
+<span class="sourceLineNo">159</span>  @Deprecated<a name="line.159"></a>
+<span class="sourceLineNo">160</span>  @Override<a name="line.160"></a>
+<span class="sourceLineNo">161</span>  public int hashCode(){<a name="line.161"></a>
+<span class="sourceLineNo">162</span>    return Arrays.hashCode(row);<a name="line.162"></a>
+<span class="sourceLineNo">163</span>  }<a name="line.163"></a>
+<span class="sourceLineNo">164</span><a name="line.164"></a>
+<span class="sourceLineNo">165</span>  @Override<a name="line.165"></a>
+<span class="sourceLineNo">166</span>  public byte[] getRow() {<a name="line.166"></a>
+<span class="sourceLineNo">167</span>    return row;<a name="line.167"></a>
+<span class="sourceLineNo">168</span>  }<a name="line.168"></a>
+<span class="sourceLineNo">169</span><a name="line.169"></a>
+<span class="sourceLineNo">170</span>  /**<a name="line.170"></a>
+<span class="sourceLineNo">171</span>   * @return An unmodifiable list of the current mutations.<a name="line.171"></a>
+<span class="sourceLineNo">172</span>   */<a name="line.172"></a>
+<span class="sourceLineNo">173</span>  public List&lt;Mutation&gt; getMutations() {<a name="line.173"></a>
+<span class="sourceLineNo">174</span>    return Collections.unmodifiableList(mutations);<a name="line.174"></a>
+<span class="sourceLineNo">175</span>  }<a name="line.175"></a>
+<span class="sourceLineNo">176</span><a name="line.176"></a>
+<span class="sourceLineNo">177</span>  public int getMaxPriority() {<a name="line.177"></a>
+<span class="sourceLineNo">178</span>    int maxPriority = Integer.MIN_VALUE;<a name="line.178"></a>
+<span class="sourceLineNo">179</span>    for (Mutation mutation : mutations) {<a name="line.179"></a>
+<span class="sourceLineNo">180</span>      maxPriority = Math.max(maxPriority, mutation.getPriority());<a name="line.180"></a>
+<span class="sourceLineNo">181</span>    }<a name="line.181"></a>
+<span class="sourceLineNo">182</span>    return maxPriority;<a name="line.182"></a>
+<span class="sourceLineNo">183</span>  }<a name="line.183"></a>
+<span class="sourceLineNo">184</span>}<a name="line.184"></a>
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/apidocs/src-html/org/apache/hadoop/hbase/net/Address.html
----------------------------------------------------------------------
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/net/Address.html b/apidocs/src-html/org/apache/hadoop/hbase/net/Address.html
index 66aba38..c893aec 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/net/Address.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/net/Address.html
@@ -25,7 +25,7 @@
 <span class="sourceLineNo">017</span> */<a name="line.17"></a>
 <span class="sourceLineNo">018</span>package org.apache.hadoop.hbase.net;<a name="line.18"></a>
 <span class="sourceLineNo">019</span><a name="line.19"></a>
-<span class="sourceLineNo">020</span>import org.apache.commons.lang.StringUtils;<a name="line.20"></a>
+<span class="sourceLineNo">020</span>import org.apache.commons.lang3.StringUtils;<a name="line.20"></a>
 <span class="sourceLineNo">021</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.21"></a>
 <span class="sourceLineNo">022</span><a name="line.22"></a>
 <span class="sourceLineNo">023</span>import org.apache.hbase.thirdparty.com.google.common.net.HostAndPort;<a name="line.23"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html
----------------------------------------------------------------------
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html b/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html
index e606e82..a242321 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.ByteArrayComparator.html
@@ -46,18 +46,18 @@
 <span class="sourceLineNo">038</span>import java.util.Iterator;<a name="line.38"></a>
 <span class="sourceLineNo">039</span>import java.util.List;<a name="line.39"></a>
 <span class="sourceLineNo">040</span><a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.commons.collections.CollectionUtils;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.Cell;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.io.RawComparator;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.io.WritableComparator;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.io.WritableUtils;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.slf4j.Logger;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.slf4j.LoggerFactory;<a name="line.50"></a>
-<span class="sourceLineNo">051</span><a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.52"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.Cell;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.io.RawComparator;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.io.WritableComparator;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.io.WritableUtils;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.slf4j.Logger;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.slf4j.LoggerFactory;<a name="line.49"></a>
+<span class="sourceLineNo">050</span><a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.52"></a>
 <span class="sourceLineNo">053</span><a name="line.53"></a>
 <span class="sourceLineNo">054</span>import com.google.protobuf.ByteString;<a name="line.54"></a>
 <span class="sourceLineNo">055</span><a name="line.55"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html
----------------------------------------------------------------------
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html b/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html
index e606e82..a242321 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.RowEndKeyComparator.html
@@ -46,18 +46,18 @@
 <span class="sourceLineNo">038</span>import java.util.Iterator;<a name="line.38"></a>
 <span class="sourceLineNo">039</span>import java.util.List;<a name="line.39"></a>
 <span class="sourceLineNo">040</span><a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.commons.collections.CollectionUtils;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.Cell;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.io.RawComparator;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.io.WritableComparator;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.io.WritableUtils;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.slf4j.Logger;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.slf4j.LoggerFactory;<a name="line.50"></a>
-<span class="sourceLineNo">051</span><a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.52"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.Cell;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.io.RawComparator;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.io.WritableComparator;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.io.WritableUtils;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.slf4j.Logger;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.slf4j.LoggerFactory;<a name="line.49"></a>
+<span class="sourceLineNo">050</span><a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.52"></a>
 <span class="sourceLineNo">053</span><a name="line.53"></a>
 <span class="sourceLineNo">054</span>import com.google.protobuf.ByteString;<a name="line.54"></a>
 <span class="sourceLineNo">055</span><a name="line.55"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html
----------------------------------------------------------------------
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html b/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html
index e606e82..a242321 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/util/Bytes.html
@@ -46,18 +46,18 @@
 <span class="sourceLineNo">038</span>import java.util.Iterator;<a name="line.38"></a>
 <span class="sourceLineNo">039</span>import java.util.List;<a name="line.39"></a>
 <span class="sourceLineNo">040</span><a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.apache.commons.collections.CollectionUtils;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.Cell;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.io.RawComparator;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.io.WritableComparator;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.io.WritableUtils;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.slf4j.Logger;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.slf4j.LoggerFactory;<a name="line.50"></a>
-<span class="sourceLineNo">051</span><a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.52"></a>
+<span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.Cell;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.42"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.io.RawComparator;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.io.WritableComparator;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.io.WritableUtils;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.slf4j.Logger;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.slf4j.LoggerFactory;<a name="line.49"></a>
+<span class="sourceLineNo">050</span><a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.52"></a>
 <span class="sourceLineNo">053</span><a name="line.53"></a>
 <span class="sourceLineNo">054</span>import com.google.protobuf.ByteString;<a name="line.54"></a>
 <span class="sourceLineNo">055</span><a name="line.55"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/book.html
----------------------------------------------------------------------
diff --git a/book.html b/book.html
index 4212d1d..8de5bcb 100644
--- a/book.html
+++ b/book.html
@@ -7008,6 +7008,16 @@ being dumped at DEBUG level as preamble on every shell command invocation.</p>
 <div class="paragraph">
 <p>If you previously relied on client side tracing integrated with HBase operations, it is recommended that you upgrade your usage to HTrace 4 as well.</p>
 </div>
+<div id="upgrade2.0.perf" class="paragraph">
+<div class="title">Performance</div>
+<p>You will likely see a change in the performance profile on upgrade to hbase-2.0.0 given that
+the read and write paths have undergone significant change. On release, writes may be
+slower with reads about the same or much better, depending on context. Be prepared
+to spend time re-tuning (See <a href="#performance">Apache HBase Performance Tuning</a>).
+Performance is also an area that is now under active review so look forward to
+improvement in coming releases (See
+<a href="https://issues.apache.org/jira/browse/HBASE-20188">HBASE-20188 TESTING Performance</a>).</p>
+</div>
 </div>
 <div class="sect3">
 <h4 id="upgrade2.0.coprocessors"><a class="anchor" href="#upgrade2.0.coprocessors"></a>13.1.2. Upgrading Coprocessors to 2.0</h4>
@@ -14471,8 +14481,10 @@ Documentation will eventually move to this reference guide, but the blog is the
 <div class="sect2">
 <h3 id="block.cache"><a class="anchor" href="#block.cache"></a>70.4. Block Cache</h3>
 <div class="paragraph">
-<p>HBase provides two different BlockCache implementations: the default on-heap <code>LruBlockCache</code> and the <code>BucketCache</code>, which is (usually) off-heap.
-This section discusses benefits and drawbacks of each implementation, how to choose the appropriate option, and configuration options for each.</p>
+<p>HBase provides two different BlockCache implementations to cache data read from HDFS:
+the default on-heap <code>LruBlockCache</code> and the <code>BucketCache</code>, which is (usually) off-heap.
+This section discusses benefits and drawbacks of each implementation, how to choose the
+appropriate option, and configuration options for each.</p>
 </div>
 <div class="admonitionblock note">
 <table>
@@ -14484,7 +14496,7 @@ This section discusses benefits and drawbacks of each implementation, how to cho
 <div class="title">Block Cache Reporting: UI</div>
 <div class="paragraph">
 <p>See the RegionServer UI for detail on caching deploy.
-Since HBase 0.98.4, the Block Cache detail has been significantly extended showing configurations, sizings, current usage, time-in-the-cache, and even detail on block counts and types.</p>
+See configurations, sizings, current usage, time-in-the-cache, and even detail on block counts and types.</p>
 </div>
 </td>
 </tr>
@@ -14493,44 +14505,22 @@ Since HBase 0.98.4, the Block Cache detail has been significantly extended showi
 <div class="sect3">
 <h4 id="_cache_choices"><a class="anchor" href="#_cache_choices"></a>70.4.1. Cache Choices</h4>
 <div class="paragraph">
-<p><code>LruBlockCache</code> is the original implementation, and is entirely within the Java heap. <code>BucketCache</code> is mainly intended for keeping block cache data off-heap, although <code>BucketCache</code> can also keep data on-heap and serve from a file-backed cache.</p>
-</div>
-<div class="admonitionblock note">
-<table>
-<tr>
-<td class="icon">
-<i class="fa icon-note" title="Note"></i>
-</td>
-<td class="content">
-<div class="title">BucketCache is production ready as of HBase 0.98.6</div>
-<div class="paragraph">
-<p>To run with BucketCache, you need HBASE-11678.
-This was included in 0.98.6.</p>
-</div>
-</td>
-</tr>
-</table>
-</div>
-<div class="paragraph">
-<p>Fetching will always be slower when fetching from BucketCache, as compared to the native on-heap LruBlockCache.
-However, latencies tend to be less erratic across time, because there is less garbage collection when you use BucketCache since it is managing BlockCache allocations, not the GC.
-If the BucketCache is deployed in off-heap mode, this memory is not managed by the GC at all.
-This is why you&#8217;d use BucketCache, so your latencies are less erratic and to mitigate GCs and heap fragmentation.
-See Nick Dimiduk&#8217;s <a href="http://www.n10k.com/blog/blockcache-101/">BlockCache 101</a> for comparisons running on-heap vs off-heap tests.
-Also see <a href="https://people.apache.org/~stack/bc/">Comparing BlockCache Deploys</a> which finds that if your dataset fits inside your LruBlockCache deploy, use it otherwise if you are experiencing cache churn (or you want your cache to exist beyond the vagaries of java GC), use BucketCache.</p>
+<p><code>LruBlockCache</code> is the original implementation, and is entirely within the Java heap.
+<code>BucketCache</code> is optional and mainly intended for keeping block cache data off-heap, although <code>BucketCache</code> can also be a file-backed cache.</p>
 </div>
 <div class="paragraph">
-<p>When you enable BucketCache, you are enabling a two tier caching system, an L1 cache which is implemented by an instance of LruBlockCache and an off-heap L2 cache which is implemented by BucketCache.
-Management of these two tiers and the policy that dictates how blocks move between them is done by <code>CombinedBlockCache</code>.
-It keeps all DATA blocks in the L2 BucketCache and meta blocks&#8201;&#8212;&#8201;INDEX and BLOOM blocks&#8201;&#8212;&#8201;on-heap in the L1 <code>LruBlockCache</code>.
-See <a href="#offheap.blockcache">Off-heap Block Cache</a> for more detail on going off-heap.</p>
+<p>When you enable BucketCache, you are enabling a two tier caching system. We used to describe the
+tiers as "L1" and "L2" but have deprecated this terminology as of hbase-2.0.0. The "L1" cache referred to an
+instance of LruBlockCache and "L2" to an off-heap BucketCache. Instead, when BucketCache is enabled,
+all DATA blocks are kept in the BucketCache tier and meta blocks&#8201;&#8212;&#8201;INDEX and BLOOM blocks&#8201;&#8212;&#8201;are on-heap in the <code>LruBlockCache</code>.
+Management of these two tiers and the policy that dictates how blocks move between them is done by <code>CombinedBlockCache</code>.</p>
 </div>
 </div>
 <div class="sect3">
 <h4 id="cache.configurations"><a class="anchor" href="#cache.configurations"></a>70.4.2. General Cache Configurations</h4>
 <div class="paragraph">
 <p>Apart from the cache implementation itself, you can set some general configuration options to control how the cache performs.
-See <a href="https://hbase.apache.org/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheConfig.html" class="bare">https://hbase.apache.org/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheConfig.html</a>.
+See <a href="https://hbase.apache.org/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheConfig.html">CacheConfig</a>.
 After setting any of these options, restart or rolling restart your cluster for the configuration to take effect.
 Check logs for errors or unexpected behavior.</p>
 </div>
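+<div class="paragraph">
+<p>As a minimal sketch of setting one such option (the property normally lives in <em>hbase-site.xml</em>; the programmatic form and the values here are for illustration only):</p>
+</div>
+<div class="listingblock">
+<div class="content">
+<pre class="CodeRay highlight"><code data-lang="java">import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+
+public class BlockCacheSizing {
+  public static void main(String[] args) {
+    Configuration conf = HBaseConfiguration.create();
+    // Fraction of heap given to the on-heap LruBlockCache; 0.4 is the default.
+    conf.setFloat("hfile.block.cache.size", 0.4f);
+    // Usable cache for a 1 GB heap, applying the 99% load factor discussed below:
+    // 1024 MB * 0.4 * 0.99 ~= 405 MB.
+    System.out.println(1024 * 0.4 * 0.99 + " MB");
+  }
+}</code></pre>
+</div>
+</div>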
@@ -14596,7 +14586,7 @@ An important concept is the <a href="http://en.wikipedia.org/wiki/Working_set_si
 </div>
 </div>
 <div class="paragraph">
-<p>The default value for the block cache is 0.25 which represents 25% of the available heap.
+<p>The default value for the block cache is 0.4 which represents 40% of the available heap.
 The last value (99%) is the default acceptable loading factor in the LRU cache after which eviction is started.
 The reason it is included in this equation is that it would be unrealistic to say that it is possible to use 100% of the available memory since this would make the process blocking from the point where it loads new blocks.
 Here are some examples:</p>
@@ -14604,10 +14594,10 @@ Here are some examples:</p>
 <div class="ulist">
 <ul>
 <li>
-<p>One region server with the heap size set to 1 GB and the default block cache size will have 253 MB of block cache available.</p>
+<p>One region server with the heap size set to 1 GB and the default block cache size will have 405 MB of block cache available.</p>
 </li>
 <li>
-<p>20 region servers with the heap size set to 8 GB and a default block cache size will have 39.6 of block cache.</p>
+<p>20 region servers with the heap size set to 8 GB and a default block cache size will have 63.3 GB of block cache.</p>
 </li>
 <li>
 <p>100 region servers with the heap size set to 24 GB and a block cache size of 0.5 will have about 1.16 TB of block cache.</p>
@@ -14698,35 +14688,79 @@ Since <a href="https://issues.apache.org/jira/browse/HBASE-4683">HBASE-4683 Alwa
 <div class="sect4">
 <h5 id="enable.bucketcache"><a class="anchor" href="#enable.bucketcache"></a>How to Enable BucketCache</h5>
 <div class="paragraph">
-<p>The usual deploy of BucketCache is via a managing class that sets up two caching tiers: an L1 on-heap cache implemented by LruBlockCache and a second L2 cache implemented with BucketCache.
+<p>The usual deploy of BucketCache is via a managing class that sets up two caching tiers:
+an on-heap cache implemented by LruBlockCache and a second cache implemented with BucketCache.
 The managing class is <a href="https://hbase.apache.org/devapidocs/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.html">CombinedBlockCache</a> by default.
 The previous link describes the caching 'policy' implemented by CombinedBlockCache.
-In short, it works by keeping meta blocks&#8201;&#8212;&#8201;INDEX and BLOOM in the L1, on-heap LruBlockCache tier&#8201;&#8212;&#8201;and DATA blocks are kept in the L2, BucketCache tier.
-It is possible to amend this behavior in HBase since version 1.0 and ask that a column family have both its meta and DATA blocks hosted on-heap in the L1 tier by setting <code>cacheDataInL1</code> via <code>(HColumnDescriptor.setCacheDataInL1(true)</code> or in the shell, creating or amending column families setting <code>CACHE_DATA_IN_L1</code> to true: e.g.</p>
+In short, it works by keeping meta blocks&#8201;&#8212;&#8201;INDEX and BLOOM in the on-heap LruBlockCache tier&#8201;&#8212;&#8201;and DATA blocks are kept in the BucketCache tier.</p>
+</div>
+<div class="exampleblock">
+<div class="content">
+<div class="dlist">
+<dl>
+<dt class="hdlist1">Pre-hbase-2.0.0 versions</dt>
+<dd>
+<p>Fetching will always be slower when fetching from BucketCache in pre-hbase-2.0.0,
+as compared to the native on-heap LruBlockCache. However, latencies tend to be less
+erratic across time, because there is less garbage collection when you use BucketCache since it is managing BlockCache allocations, not the GC.
+If the BucketCache is deployed in off-heap mode, this memory is not managed by the GC at all.
+This is why you&#8217;d use BucketCache in pre-2.0.0, so your latencies are less erratic,
+to mitigate GCs and heap fragmentation, and so you can safely use more memory.
+See Nick Dimiduk&#8217;s <a href="http://www.n10k.com/blog/blockcache-101/">BlockCache 101</a> for comparisons running on-heap vs off-heap tests.
+Also see <a href="https://people.apache.org/~stack/bc/">Comparing BlockCache Deploys</a>, which finds that if your dataset fits inside your LruBlockCache deploy, use it; otherwise, if you are experiencing cache churn (or you want your cache to exist beyond the vagaries of java GC), use BucketCache.</p>
+<div class="paragraph">
+<p>In pre-2.0.0,
+one can configure the BucketCache so it receives the <code>victim</code> of an LruBlockCache eviction.
+All DATA and INDEX blocks are cached in L1 first. When eviction happens from L1, the blocks (or <code>victims</code>) will get moved to L2.
+Set <code>cacheDataInL1</code> via <code>HColumnDescriptor.setCacheDataInL1(true)</code> or in the shell, when creating or amending column families, by setting <code>CACHE_DATA_IN_L1</code> to true: e.g.</p>
+</div>
+</dd>
+</dl>
 </div>
 <div class="listingblock">
 <div class="content">
 <pre class="CodeRay highlight"><code data-lang="java">hbase(main):<span class="octal">003</span>:<span class="integer">0</span>&gt; create <span class="string"><span class="delimiter">'</span><span class="content">t</span><span class="delimiter">'</span></span>, {NAME =&gt; <span class="string"><span class="delimiter">'</span><span class="content">t</span><span class="delimiter">'</span></span>, CONFIGURATION =&gt; {CACHE_DATA_IN_L1 =&gt; <span class="string"><span class="delimiter">'</span><span class="content">true</span><span class="delimiter">'</span></span>}}</code></pre>
 </div>
 </div>
+<div class="dlist">
+<dl>
+<dt class="hdlist1">hbase-2.0.0+ versions</dt>
+<dd>
+<p>HBASE-11425 changed the HBase read path so it could hold the read-data off-heap, avoiding copying of cached data onto the java heap.
+See <a href="#regionserver.offheap.readpath">Offheap read-path</a>. In hbase-2.0.0, off-heap latencies approach those of the on-heap cache, with the added
+benefit of NOT provoking GC.</p>
+<div class="paragraph">
+<p>From HBase 2.0.0 onwards, the notions of L1 and L2 have been deprecated. When BucketCache is turned on, the DATA blocks will always go to BucketCache and INDEX/BLOOM blocks go to the on-heap LruBlockCache. <code>cacheDataInL1</code> support has been removed.</p>
+</div>
+</dd>
+</dl>
+</div>
+</div>
+</div>
 <div class="paragraph">
-<p>The BucketCache Block Cache can be deployed on-heap, off-heap, or file based.
-You set which via the <code>hbase.bucketcache.ioengine</code> setting.
-Setting it to <code>heap</code> will have BucketCache deployed inside the allocated Java heap.
-Setting it to <code>offheap</code> will have BucketCache make its allocations off-heap, and an ioengine setting of <code>file:PATH_TO_FILE</code> will direct BucketCache to use a file caching (Useful in particular if you have some fast I/O attached to the box such as SSDs).</p>
+<p>The BucketCache Block Cache can be deployed in <em>off-heap</em>, <em>file</em>, or <em>mmapped</em> file mode.</p>
 </div>
 <div class="paragraph">
-<p>It is possible to deploy an L1+L2 setup where we bypass the CombinedBlockCache policy and have BucketCache working as a strict L2 cache to the L1 LruBlockCache.
-For such a setup, set <code>CacheConfig.BUCKET_CACHE_COMBINED_KEY</code> to <code>false</code>.
+<p>You set which mode to use via the <code>hbase.bucketcache.ioengine</code> setting.
+Setting it to <code>offheap</code> will have BucketCache make its allocations off-heap, and an ioengine setting of <code>file:PATH_TO_FILE</code> will direct BucketCache to use file caching (useful in particular if you have some fast I/O attached to the box such as SSDs). From 2.0.0, it is possible to have more than one file backing the BucketCache. This is especially useful when the cache size requirement is high. For multiple backing files, configure the ioengine as <code>files:PATH_TO_FILE1,PATH_TO_FILE2,PATH_TO_FILE3</code>. BucketCache can also be configured to use an mmapped file. Configure the ioengine as <code>mmap:PATH_TO_FILE</code> for this.</p>
+</div>
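+<div class="paragraph">
+<p>A short sketch of the ioengine variants just described, set programmatically for illustration (the paths are hypothetical; in a real deploy these properties go in <em>hbase-site.xml</em>):</p>
+</div>
+<div class="listingblock">
+<div class="content">
+<pre class="CodeRay highlight"><code data-lang="java">Configuration conf = HBaseConfiguration.create();
+// Off-heap BucketCache:
+conf.set("hbase.bucketcache.ioengine", "offheap");
+// A single file-backed cache (hypothetical path):
+//   conf.set("hbase.bucketcache.ioengine", "file:/mnt/ssd1/bucketcache");
+// From 2.0.0, multiple backing files (hypothetical paths):
+//   conf.set("hbase.bucketcache.ioengine", "files:/mnt/ssd1/bc1,/mnt/ssd2/bc2");
+// An mmapped file (hypothetical path):
+//   conf.set("hbase.bucketcache.ioengine", "mmap:/mnt/ssd1/bucketcache");</code></pre>
+</div>
+</div>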
+<div class="paragraph">
+<p>It is possible to deploy a tiered setup where we bypass the CombinedBlockCache policy and have BucketCache working as a strict L2 cache to the L1 LruBlockCache.
+For such a setup, set <code>hbase.bucketcache.combinedcache.enabled</code> to <code>false</code>.
 In this mode, on eviction from L1, blocks go to L2.
 When a block is cached, it is cached first in L1.
 When we go to look for a cached block, we look first in L1 and if none found, then search L2.
-Let us call this deploy format, <em>Raw L1+L2</em>.</p>
+Let us call this deploy format, <em>Raw L1+L2</em>.
+NOTE: This L1+L2 mode is removed as of 2.0.0. When BucketCache is used, it is strictly the DATA cache and the LruBlockCache caches INDEX/META blocks.</p>
 </div>
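+<div class="paragraph">
+<p>A one-line sketch of the pre-2.0.0 <em>Raw L1+L2</em> setup (not applicable from 2.0.0 onwards, per the NOTE above):</p>
+</div>
+<div class="listingblock">
+<div class="content">
+<pre class="CodeRay highlight"><code data-lang="java">// Pre-2.0.0 only: bypass CombinedBlockCache; BucketCache becomes a strict L2.
+conf.setBoolean("hbase.bucketcache.combinedcache.enabled", false);</code></pre>
+</div>
+</div>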
 <div class="paragraph">
 <p>Other BucketCache configs include: specifying a location to persist cache to across restarts, how many threads to use writing the cache, etc.
 See the <a href="https://hbase.apache.org/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheConfig.html">CacheConfig.html</a> class for configuration options and descriptions.</p>
 </div>
+<div class="paragraph">
+<p>To check that it is enabled, look for the log line describing the cache setup; it will detail how BucketCache has been deployed.
+Also see the UI. It will detail the cache tiering and its configuration.</p>
+</div>
 <div class="sect5">
 <h6 id="_bucketcache_example_configuration"><a class="anchor" href="#_bucketcache_example_configuration"></a>BucketCache Example Configuration</h6>
 <div class="paragraph">
@@ -14803,9 +14837,10 @@ The following example configures buckets of size 4096 and 8192.</p>
 <div class="title">Direct Memory Usage In HBase</div>
 <div class="paragraph">
 <p>The default maximum direct memory varies by JVM.
-Traditionally it is 64M or some relation to allocated heap size (-Xmx) or no limit at all (JDK7 apparently). HBase servers use direct memory, in particular short-circuit reading, the hosted DFSClient will allocate direct memory buffers.
+Traditionally it is 64M or some relation to allocated heap size (-Xmx) or no limit at all (JDK7 apparently). HBase servers use direct memory, in particular for short-circuit reading (See <a href="#perf.hdfs.configs.localread">Leveraging local data</a>), where the hosted DFSClient will allocate direct memory buffers. How much the DFSClient uses is not easy to quantify; it is the number of open HFiles * <code>hbase.dfs.client.read.shortcircuit.buffer.size</code> where <code>hbase.dfs.client.read.shortcircuit.buffer.size</code> is set to 128k in HBase&#8201;&#8212;&#8201;see <em>hbase-default.xml</em> default configurations.
 If you do off-heap block caching, you&#8217;ll be making use of direct memory.
-Starting your JVM, make sure the <code>-XX:MaxDirectMemorySize</code> setting in <em>conf/hbase-env.sh</em> is set to some value that is higher than what you have allocated to your off-heap BlockCache (<code>hbase.bucketcache.size</code>). It should be larger than your off-heap block cache and then some for DFSClient usage (How much the DFSClient uses is not easy to quantify; it is the number of open HFiles * <code>hbase.dfs.client.read.shortcircuit.buffer.size</code> where <code>hbase.dfs.client.read.shortcircuit.buffer.size</code> is set to 128k in HBase&#8201;&#8212;&#8201;see <em>hbase-default.xml</em> default configurations). Direct memory, which is part of the Java process heap, is separate from the object heap allocated by -Xmx.
+The RPCServer uses a ByteBuffer pool. From 2.0.0, these buffers are off-heap ByteBuffers.
+Starting your JVM, make sure the <code>-XX:MaxDirectMemorySize</code> setting in <em>conf/hbase-env.sh</em> considers the off-heap BlockCache (<code>hbase.bucketcache.size</code>), DFSClient usage, and the RPC-side ByteBufferPool max size. It has to be a bit higher than the sum of the off-heap BlockCache size and the max ByteBufferPool size. Allocating an extra 1-2 GB for the max direct memory size has worked in tests. Direct memory, which is part of the Java process' memory footprint, is separate from the object heap allocated by -Xmx.
 The value allocated by <code>MaxDirectMemorySize</code> must not exceed physical RAM, and is likely to be less than the total available RAM due to other memory requirements and system constraints.</p>
 </div>
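+<div class="paragraph">
+<p>A back-of-the-envelope sketch of the sizing just described; every figure below is a hypothetical example, not a recommendation:</p>
+</div>
+<div class="listingblock">
+<div class="content">
+<pre class="CodeRay highlight"><code data-lang="java">// Hypothetical inputs for sizing -XX:MaxDirectMemorySize (set in conf/hbase-env.sh).
+long offHeapBlockCacheMb = 4096;            // hbase.bucketcache.size (example value)
+long openHFiles = 1000;                     // deploy-specific, hard to know precisely
+long dfsClientMb = openHFiles * 128 / 1024; // 128k short-circuit buffer per open HFile
+long headroomMb = 2048;                     // the extra 1-2 GB that has worked in tests
+long maxDirectMemoryMb = offHeapBlockCacheMb + dfsClientMb + headroomMb; // ~6269 MB</code></pre>
+</div>
+</div>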
 <div class="paragraph">
@@ -14836,7 +14871,7 @@ where size-of-bucket-cache itself is EITHER the value of the configuration <code
 </div>
 <div class="paragraph">
 <p>In 1.0, it should be more straight-forward.
-L1 LruBlockCache size is set as a fraction of java heap using <code>hfile.block.cache.size setting</code> (not the best name) and L2 is set as above either in absolute Megabytes or as a fraction of allocated maximum direct memory.</p>
+On-heap LruBlockCache size is set as a fraction of the java heap using the <code>hfile.block.cache.size</code> setting (not the best name) and BucketCache is set as above in absolute megabytes.</p>
 </div>
 </td>
 </tr>
@@ -14861,7 +14896,66 @@ For a RegionServer hosting data that can comfortably fit into cache, or if your
 </div>
 </div>
 <div class="sect2">
-<h3 id="regionserver_splitting_implementation"><a class="anchor" href="#regionserver_splitting_implementation"></a>70.5. RegionServer Splitting Implementation</h3>
+<h3 id="regionserver.offheap"><a class="anchor" href="#regionserver.offheap"></a>70.5. RegionServer Offheap Read/Write Path</h3>
+<div class="sect3">
+<h4 id="regionserver.offheap.readpath"><a class="anchor" href="#regionserver.offheap.readpath"></a>70.5.1. Offheap read-path</h4>
+<div class="paragraph">
+<p>In hbase-2.0.0, <a href="https://issues.apache.org/jira/browse/HBASE-11425">HBASE-11425</a> changed the HBase read path so it
+could hold the read-data off-heap, avoiding copying of cached data onto the java heap.
+This reduces GC pauses given there is less garbage made and so less to clear. The off-heap read path performs
+similarly to, or better than, the on-heap LRU cache.
+If the BucketCache is in <code>file</code> mode, fetching will always be slower compared to the native on-heap LruBlockCache.
+Refer to the blogs below for more details and test results on the off-heap read path:
+<a href="https://blogs.apache.org/hbase/entry/offheaping_the_read_path_in">Offheaping the Read Path in Apache HBase: Part 1 of 2</a>
+and <a href="https://blogs.apache.org/hbase/entry/offheap-read-path-in-production">Offheap Read-Path in Production - The Alibaba story</a></p>
+</div>
+<div class="paragraph">
+<p>For an end-to-end off-heap read path, first of all there should be an off-heap backed <a href="#offheap.blockcache">Off-heap Block Cache</a> (BC). Configure <code>hbase.bucketcache.ioengine</code> to <code>offheap</code> in
+<em>hbase-site.xml</em>. Also specify the total capacity of the BC using the <code>hbase.bucketcache.size</code> config. Please remember to adjust the value of 'HBASE_OFFHEAPSIZE' in
+<em>hbase-env.sh</em>. This is how we specify the max possible off-heap memory allocation for the
+RegionServer java process. This should be bigger than the off-heap BC size. Please keep in mind that there is no default for <code>hbase.bucketcache.ioengine</code>,
+which means the BC is turned OFF by default (See <a href="#direct.memory">Direct Memory Usage In HBase</a>).</p>
+</div>
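+<div class="paragraph">
+<p>A minimal sketch of the two settings just named (illustrative values; in a real deploy these belong in <em>hbase-site.xml</em>):</p>
+</div>
+<div class="listingblock">
+<div class="content">
+<pre class="CodeRay highlight"><code data-lang="java">// No default for the ioengine, so the BucketCache is OFF unless this is set.
+conf.set("hbase.bucketcache.ioengine", "offheap");
+conf.set("hbase.bucketcache.size", "4096"); // total BC capacity; example value
+// And in hbase-env.sh, HBASE_OFFHEAPSIZE must be bigger than the BC size,
+// e.g. export HBASE_OFFHEAPSIZE=5G (hypothetical value).</code></pre>
+</div>
+</div>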
+<div class="paragraph">
+<p>The next thing to tune is the ByteBuffer pool on the RPC server side.
+The buffers from this pool are used to accumulate the cell bytes and create a result cell block to send back to the client side.
+<code>hbase.ipc.server.reservoir.enabled</code> can be used to turn this pool ON or OFF. By default this pool is ON and available. HBase will create off-heap ByteBuffers
+and pool them. Please make sure not to turn this OFF if you want end-to-end off-heaping in the read path.
+If this pool is turned off, the server will create temporary buffers on heap to accumulate the cell bytes and make a result cell block. This can impact the GC on a heavily read-loaded server.
+The user can tune this pool with respect to how many buffers are in the pool and what the size of each ByteBuffer should be.
+Use the config <code>hbase.ipc.server.reservoir.initial.buffer.size</code> to tune each of the buffer sizes. The default is 64 KB.</p>
+</div>
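+<div class="paragraph">
+<p>The pool-related properties named above, in one illustrative sketch (the values shown are the documented defaults):</p>
+</div>
+<div class="listingblock">
+<div class="content">
+<pre class="CodeRay highlight"><code data-lang="java">// ON by default; leave it ON for an end-to-end off-heap read path.
+conf.setBoolean("hbase.ipc.server.reservoir.enabled", true);
+// Size of each pooled ByteBuffer; 64 KB is the default.
+conf.setInt("hbase.ipc.server.reservoir.initial.buffer.size", 64 * 1024);</code></pre>
+</div>
+</div>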
+<div class="paragraph">
+<p>When the read pattern is a random row read load and each row is smaller than this 64 KB, try reducing the buffer size.
+When the result size is larger than one ByteBuffer size, the server will try to grab more than one buffer and make a result cell block out of these. When the pool runs out of buffers, the server will end up creating temporary on-heap buffers.</p>
+</div>
+<div class="paragraph">
+<p>The maximum number of ByteBuffers in the pool can be tuned using the config <code>hbase.ipc.server.reservoir.initial.max</code>. Its value defaults to 64 * the number of region server handlers configured (see the config <code>hbase.regionserver.handler.count</code>). The math is such that by default we assume 2 MB as the result cell block size per read result, with each handler handling one read. For 2 MB, we need 32 buffers, each of size 64 KB (see the default buffer size in the pool), so 32 ByteBuffers (BBs) per handler. We allocate twice this as the max BB count so that one handler can be creating a response and handing it to the RPC Responder thread while already handling a new request, creating a new response cell block (using pooled buffers). Even if the responder cannot send back the first TCP reply immediately, there should still be enough buffers in the pool without having to make temporary buffers on the heap. Again, for smaller sized random row reads, tune this max count down.
+Buffers are created lazily, and the count is the maximum number to be pooled.</p>
+</div>
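+<div class="paragraph">
+<p>The default math spelled out, under the assumptions stated above (2 MB cell block, 64 KB buffers):</p>
+</div>
+<div class="listingblock">
+<div class="content">
+<pre class="CodeRay highlight"><code data-lang="java">int handlers = 30;                   // hbase.regionserver.handler.count default
+int bufferKb = 64;                   // default pooled buffer size
+int cellBlockKb = 2 * 1024;          // assumed result cell block size: 2 MB
+int buffersPerHandler = cellBlockKb / bufferKb;     // = 32
+int defaultMax = 2 * buffersPerHandler * handlers;  // = 64 * handlers = 1920</code></pre>
+</div>
+</div>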
+<div class="paragraph">
+<p>If you still see GC issues even after making the read path off-heap end-to-end, look for issues in the appropriate buffer pool. Check for the below RegionServer log line at INFO level:</p>
+</div>
+<div class="listingblock">
+<div class="content">
+<pre class="CodeRay highlight"><code data-lang="java">Pool already reached its max capacity : XXX and no free buffers now. Consider increasing the value <span class="keyword">for</span> <span class="string"><span class="delimiter">'</span><span class="content">hbase.ipc.server.reservoir.initial.max</span><span class="delimiter">'</span></span> ?</code></pre>
+</div>
+</div>
+<div class="paragraph">
+<p>The setting for <em>HBASE_OFFHEAPSIZE</em> in <em>hbase-env.sh</em> should also account for this off-heap buffer pool on the RPC side. We need to configure this max off-heap size for the RegionServer as a bit higher than the sum of this max pool size and the off-heap cache size. The TCP layer will also need to create direct ByteBuffers for TCP communication. The DFS client will also need some off-heap memory to do its work, especially if short-circuit reads are configured. Allocating an extra 1-2 GB for the max direct memory size has worked in tests.</p>
+</div>
+<div class="paragraph">
+<p>If you are using coprocessors and reference the Cells in the read results, DO NOT store references to these Cells outside the scope of the CP hook methods. Sometimes a CP needs to store information about a cell (like its row key) for consideration in a subsequent CP hook call. For such cases, clone the required fields of the Cell as the use case requires (see the CellUtil#cloneXXX(Cell) APIs).</p>
+</div>
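+<div class="paragraph">
+<p>A minimal sketch of the cloning advice above, inside a hypothetical CP hook helper (only the row key is kept, never the Cell itself):</p>
+</div>
+<div class="listingblock">
+<div class="content">
+<pre class="CodeRay highlight"><code data-lang="java">import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
+
+// Inside a CP hook method: do NOT keep a reference to 'cell' past the hook.
+// Clone just the field you need instead, e.g. the row key:
+void remember(Cell cell) {
+  byte[] rowCopy = CellUtil.cloneRow(cell); // safe to hold beyond the hook
+  savedRows.add(rowCopy);                   // 'savedRows' is a hypothetical store
+}</code></pre>
+</div>
+</div>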
+</div>
+<div class="sect3">
+<h4 id="regionserver.offheap.writepath"><a class="anchor" href="#regionserver.offheap.writepath"></a>70.5.2. Offheap write-path</h4>
+<div class="paragraph">
+<p>TODO</p>
+</div>
+</div>
+</div>
+<div class="sect2">
+<h3 id="regionserver_splitting_implementation"><a class="anchor" href="#regionserver_splitting_implementation"></a>70.6. RegionServer Splitting Implementation</h3>
 <div class="paragraph">
 <p>As write requests are handled by the region server, they accumulate in an in-memory storage system called the <em>memstore</em>. Once the memstore fills, its content are written to disk as additional store files. This event is called a <em>memstore flush</em>. As store files accumulate, the RegionServer will <a href="#compaction">compact</a> them into fewer, larger files. After each flush or compaction finishes, the amount of data stored in the region has changed. The RegionServer consults the region split policy to determine if the region has grown too large or should be split for another policy-specific reason. A region split request is enqueued if the policy recommends it.</p>
 </div>
@@ -14919,9 +15013,9 @@ Those reference files will point to the parent region&#8217;s files.</p>
 </div>
 </div>
 <div class="sect2">
-<h3 id="wal"><a class="anchor" href="#wal"></a>70.6. Write Ahead Log (WAL)</h3>
+<h3 id="wal"><a class="anchor" href="#wal"></a>70.7. Write Ahead Log (WAL)</h3>
 <div class="sect3">
-<h4 id="purpose.wal"><a class="anchor" href="#purpose.wal"></a>70.6.1. Purpose</h4>
+<h4 id="purpose.wal"><a class="anchor" href="#purpose.wal"></a>70.7.1. Purpose</h4>
 <div class="paragraph">
 <p>The <em>Write Ahead Log (WAL)</em> records all changes to data in HBase, to file-based storage.
 Under normal operations, the WAL is not needed because data changes move from the MemStore to StoreFiles.
@@ -14962,7 +15056,7 @@ You will likely find references to the HLog in documentation tailored to these o
 </div>
 </div>
 <div class="sect3">
-<h4 id="wal.providers"><a class="anchor" href="#wal.providers"></a>70.6.2. WAL Providers</h4>
+<h4 id="wal.providers"><a class="anchor" href="#wal.providers"></a>70.7.2. WAL Providers</h4>
 <div class="paragraph">
 <p>In HBase, there are a number of WAL imlementations (or 'Providers'). Each is known
 by a short name label (that unfortunately is not always descriptive). You set the provider in
@@ -14993,7 +15087,7 @@ by a short name label (that unfortunately is not always descriptive). You set th
 </div>
 </div>
 <div class="sect3">
-<h4 id="_multiwal"><a class="anchor" href="#_multiwal"></a>70.6.3. MultiWAL</h4>
+<h4 id="_multiwal"><a class="anchor" href="#_multiwal"></a>70.7.3. MultiWAL</h4>
 <div class="paragraph">
 <p>With a single WAL per RegionServer, the RegionServer must write to the WAL serially, because HDFS files must be sequential. This causes the WAL to be a performance bottleneck.</p>
 </div>
@@ -15023,13 +15117,13 @@ by a short name label (that unfortunately is not always descriptive). You set th
 </div>
 </div>
 <div class="sect3">
-<h4 id="wal_flush"><a class="anchor" href="#wal_flush"></a>70.6.4. WAL Flushing</h4>
+<h4 id="wal_flush"><a class="anchor" href="#wal_flush"></a>70.7.4. WAL Flushing</h4>
 <div class="paragraph">
 <p>TODO (describe).</p>
 </div>
 </div>
 <div class="sect3">
-<h4 id="_wal_splitting"><a class="anchor" href="#_wal_splitting"></a>70.6.5. WAL Splitting</h4>
+<h4 id="_wal_splitting"><a class="anchor" href="#_wal_splitting"></a>70.7.5. WAL Splitting</h4>
 <div class="paragraph">
 <p>A RegionServer serves many regions.
 All of the regions in a region server share the same active WAL file.
@@ -15362,7 +15456,7 @@ If none are found, it throws an exception so that the log splitting can be retri
 </div>
 </div>
 <div class="sect3">
-<h4 id="wal.compression"><a class="anchor" href="#wal.compression"></a>70.6.6. WAL Compression</h4>
+<h4 id="wal.compression"><a class="anchor" href="#wal.compression"></a>70.7.6. WAL Compression</h4>
 <div class="paragraph">
 <p>The content of the WAL can be compressed using LRU Dictionary compression.
 This can be used to speed up WAL replication to different datanodes.
@@ -15381,7 +15475,7 @@ dictionary because of an abrupt termination, a read of this last block may not b
 </div>
 </div>
 <div class="sect3">
-<h4 id="wal.durability"><a class="anchor" href="#wal.durability"></a>70.6.7. Durability</h4>
+<h4 id="wal.durability"><a class="anchor" href="#wal.durability"></a>70.7.7. Durability</h4>
 <div class="paragraph">
 <p>It is possible to set <em>durability</em> on each Mutation or on a Table basis. Options include:</p>
 </div>
@@ -15407,7 +15501,7 @@ options unfortunately closely named</p>
 </div>
 </div>
 <div class="sect3">
-<h4 id="wal.disable"><a class="anchor" href="#wal.disable"></a>70.6.8. Disabling the WAL</h4>
+<h4 id="wal.disable"><a class="anchor" href="#wal.disable"></a>70.7.8. Disabling the WAL</h4>
 <div class="paragraph">
 <p>It is possible to disable the WAL, to improve performance in certain specific situations.
 However, disabling the WAL puts your data at risk.
@@ -37730,7 +37824,7 @@ The server will return cellblocks compressed using this same compressor as long
 <div id="footer">
 <div id="footer-text">
 Version 3.0.0-SNAPSHOT<br>
-Last updated 2018-04-19 14:29:51 UTC
+Last updated 2018-04-20 14:29:52 UTC
 </div>
 </div>
 </body>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/bulk-loads.html
----------------------------------------------------------------------
diff --git a/bulk-loads.html b/bulk-loads.html
index ef792c5..2c97d08 100644
--- a/bulk-loads.html
+++ b/bulk-loads.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20180419" />
+    <meta name="Date-Revision-yyyymmdd" content="20180420" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013;  
       Bulk Loads in Apache HBase (TM)
@@ -306,7 +306,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-04-19</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-04-20</li>
             </p>
                 </div>
 


[15/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html b/devapidocs/src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html
index c4a43e7..0b83610 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html
@@ -31,137 +31,138 @@
 <span class="sourceLineNo">023</span>import java.util.Set;<a name="line.23"></a>
 <span class="sourceLineNo">024</span>import java.util.concurrent.TimeUnit;<a name="line.24"></a>
 <span class="sourceLineNo">025</span><a name="line.25"></a>
-<span class="sourceLineNo">026</span>import org.apache.commons.logging.Log;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import org.apache.commons.logging.LogFactory;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import org.apache.hadoop.conf.Configuration;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.ScheduledChore;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.regionserver.MetricsRegionServer;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.regionserver.Region;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.regionserver.RegionServerServices;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.34"></a>
-<span class="sourceLineNo">035</span><a name="line.35"></a>
-<span class="sourceLineNo">036</span>/**<a name="line.36"></a>
-<span class="sourceLineNo">037</span> * A Chore which sends the region size reports on this RegionServer to the Master.<a name="line.37"></a>
-<span class="sourceLineNo">038</span> */<a name="line.38"></a>
-<span class="sourceLineNo">039</span>@InterfaceAudience.Private<a name="line.39"></a>
-<span class="sourceLineNo">040</span>public class RegionSizeReportingChore extends ScheduledChore {<a name="line.40"></a>
-<span class="sourceLineNo">041</span>  private static final Log LOG = LogFactory.getLog(RegionSizeReportingChore.class);<a name="line.41"></a>
-<span class="sourceLineNo">042</span><a name="line.42"></a>
-<span class="sourceLineNo">043</span>  static final String REGION_SIZE_REPORTING_CHORE_PERIOD_KEY =<a name="line.43"></a>
-<span class="sourceLineNo">044</span>      "hbase.regionserver.quotas.region.size.reporting.chore.period";<a name="line.44"></a>
-<span class="sourceLineNo">045</span>  static final int REGION_SIZE_REPORTING_CHORE_PERIOD_DEFAULT = 1000 * 60;<a name="line.45"></a>
-<span class="sourceLineNo">046</span><a name="line.46"></a>
-<span class="sourceLineNo">047</span>  static final String REGION_SIZE_REPORTING_CHORE_DELAY_KEY =<a name="line.47"></a>
-<span class="sourceLineNo">048</span>      "hbase.regionserver.quotas.region.size.reporting.chore.delay";<a name="line.48"></a>
-<span class="sourceLineNo">049</span>  static final long REGION_SIZE_REPORTING_CHORE_DELAY_DEFAULT = 1000 * 30;<a name="line.49"></a>
-<span class="sourceLineNo">050</span><a name="line.50"></a>
-<span class="sourceLineNo">051</span>  static final String REGION_SIZE_REPORTING_CHORE_TIMEUNIT_KEY =<a name="line.51"></a>
-<span class="sourceLineNo">052</span>      "hbase.regionserver.quotas.region.size.reporting.chore.timeunit";<a name="line.52"></a>
-<span class="sourceLineNo">053</span>  static final String REGION_SIZE_REPORTING_CHORE_TIMEUNIT_DEFAULT = TimeUnit.MILLISECONDS.name();<a name="line.53"></a>
-<span class="sourceLineNo">054</span><a name="line.54"></a>
-<span class="sourceLineNo">055</span>  private final RegionServerServices rsServices;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>  private final MetricsRegionServer metrics;<a name="line.56"></a>
-<span class="sourceLineNo">057</span><a name="line.57"></a>
-<span class="sourceLineNo">058</span>  public RegionSizeReportingChore(RegionServerServices rsServices) {<a name="line.58"></a>
-<span class="sourceLineNo">059</span>    super(<a name="line.59"></a>
-<span class="sourceLineNo">060</span>        RegionSizeReportingChore.class.getSimpleName(), rsServices,<a name="line.60"></a>
-<span class="sourceLineNo">061</span>        getPeriod(rsServices.getConfiguration()), getInitialDelay(rsServices.getConfiguration()),<a name="line.61"></a>
-<span class="sourceLineNo">062</span>        getTimeUnit(rsServices.getConfiguration()));<a name="line.62"></a>
-<span class="sourceLineNo">063</span>    this.rsServices = rsServices;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>    this.metrics = rsServices.getMetrics();<a name="line.64"></a>
-<span class="sourceLineNo">065</span>  }<a name="line.65"></a>
-<span class="sourceLineNo">066</span><a name="line.66"></a>
-<span class="sourceLineNo">067</span>  @Override<a name="line.67"></a>
-<span class="sourceLineNo">068</span>  protected void chore() {<a name="line.68"></a>
-<span class="sourceLineNo">069</span>    final long start = System.nanoTime();<a name="line.69"></a>
-<span class="sourceLineNo">070</span>    try {<a name="line.70"></a>
-<span class="sourceLineNo">071</span>      _chore();<a name="line.71"></a>
-<span class="sourceLineNo">072</span>    } finally {<a name="line.72"></a>
-<span class="sourceLineNo">073</span>      if (metrics != null) {<a name="line.73"></a>
-<span class="sourceLineNo">074</span>        metrics.incrementRegionSizeReportingChoreTime(<a name="line.74"></a>
-<span class="sourceLineNo">075</span>            TimeUnit.MILLISECONDS.convert(System.nanoTime() - start, TimeUnit.NANOSECONDS));<a name="line.75"></a>
-<span class="sourceLineNo">076</span>      }<a name="line.76"></a>
-<span class="sourceLineNo">077</span>    }<a name="line.77"></a>
-<span class="sourceLineNo">078</span>  }<a name="line.78"></a>
-<span class="sourceLineNo">079</span><a name="line.79"></a>
-<span class="sourceLineNo">080</span>  void _chore() {<a name="line.80"></a>
-<span class="sourceLineNo">081</span>    final RegionServerSpaceQuotaManager quotaManager =<a name="line.81"></a>
-<span class="sourceLineNo">082</span>        rsServices.getRegionServerSpaceQuotaManager();<a name="line.82"></a>
-<span class="sourceLineNo">083</span>    // Get the HRegionInfo for each online region<a name="line.83"></a>
-<span class="sourceLineNo">084</span>    HashSet&lt;RegionInfo&gt; onlineRegionInfos = getOnlineRegionInfos(rsServices.getRegions());<a name="line.84"></a>
-<span class="sourceLineNo">085</span>    RegionSizeStore store = quotaManager.getRegionSizeStore();<a name="line.85"></a>
-<span class="sourceLineNo">086</span>    // Remove all sizes for non-online regions<a name="line.86"></a>
-<span class="sourceLineNo">087</span>    removeNonOnlineRegions(store, onlineRegionInfos);<a name="line.87"></a>
-<span class="sourceLineNo">088</span>    rsServices.reportRegionSizesForQuotas(store);<a name="line.88"></a>
-<span class="sourceLineNo">089</span>  }<a name="line.89"></a>
-<span class="sourceLineNo">090</span><a name="line.90"></a>
-<span class="sourceLineNo">091</span>  HashSet&lt;RegionInfo&gt; getOnlineRegionInfos(List&lt;? extends Region&gt; onlineRegions) {<a name="line.91"></a>
-<span class="sourceLineNo">092</span>    HashSet&lt;RegionInfo&gt; regionInfos = new HashSet&lt;&gt;();<a name="line.92"></a>
-<span class="sourceLineNo">093</span>    onlineRegions.forEach((region) -&gt; regionInfos.add(region.getRegionInfo()));<a name="line.93"></a>
-<span class="sourceLineNo">094</span>    return regionInfos;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>  }<a name="line.95"></a>
-<span class="sourceLineNo">096</span><a name="line.96"></a>
-<span class="sourceLineNo">097</span>  void removeNonOnlineRegions(RegionSizeStore store, Set&lt;RegionInfo&gt; onlineRegions) {<a name="line.97"></a>
-<span class="sourceLineNo">098</span>    // We have to remove regions which are no longer online from the store, otherwise they will<a name="line.98"></a>
-<span class="sourceLineNo">099</span>    // continue to be sent to the Master which will prevent size report expiration.<a name="line.99"></a>
-<span class="sourceLineNo">100</span>    if (onlineRegions.isEmpty()) {<a name="line.100"></a>
-<span class="sourceLineNo">101</span>      // Easy-case, no online regions means no size reports<a name="line.101"></a>
-<span class="sourceLineNo">102</span>      store.clear();<a name="line.102"></a>
-<span class="sourceLineNo">103</span>      return;<a name="line.103"></a>
-<span class="sourceLineNo">104</span>    }<a name="line.104"></a>
-<span class="sourceLineNo">105</span><a name="line.105"></a>
-<span class="sourceLineNo">106</span>    Iterator&lt;Entry&lt;RegionInfo,RegionSize&gt;&gt; iter = store.iterator();<a name="line.106"></a>
-<span class="sourceLineNo">107</span>    int numEntriesRemoved = 0;<a name="line.107"></a>
-<span class="sourceLineNo">108</span>    while (iter.hasNext()) {<a name="line.108"></a>
-<span class="sourceLineNo">109</span>      Entry&lt;RegionInfo,RegionSize&gt; entry = iter.next();<a name="line.109"></a>
-<span class="sourceLineNo">110</span>      RegionInfo regionInfo = entry.getKey();<a name="line.110"></a>
-<span class="sourceLineNo">111</span>      if (!onlineRegions.contains(regionInfo)) {<a name="line.111"></a>
-<span class="sourceLineNo">112</span>        numEntriesRemoved++;<a name="line.112"></a>
-<span class="sourceLineNo">113</span>        iter.remove();<a name="line.113"></a>
-<span class="sourceLineNo">114</span>      }<a name="line.114"></a>
-<span class="sourceLineNo">115</span>    }<a name="line.115"></a>
-<span class="sourceLineNo">116</span>    if (LOG.isTraceEnabled()) {<a name="line.116"></a>
-<span class="sourceLineNo">117</span>      LOG.trace("Removed " + numEntriesRemoved + " region sizes before reporting to Master "<a name="line.117"></a>
-<span class="sourceLineNo">118</span>          + "because they are for non-online regions.");<a name="line.118"></a>
-<span class="sourceLineNo">119</span>    }<a name="line.119"></a>
-<span class="sourceLineNo">120</span>  }<a name="line.120"></a>
-<span class="sourceLineNo">121</span><a name="line.121"></a>
-<span class="sourceLineNo">122</span>  /**<a name="line.122"></a>
-<span class="sourceLineNo">123</span>   * Extracts the period for the chore from the configuration.<a name="line.123"></a>
-<span class="sourceLineNo">124</span>   *<a name="line.124"></a>
-<span class="sourceLineNo">125</span>   * @param conf The configuration object.<a name="line.125"></a>
-<span class="sourceLineNo">126</span>   * @return The configured chore period or the default value.<a name="line.126"></a>
-<span class="sourceLineNo">127</span>   */<a name="line.127"></a>
-<span class="sourceLineNo">128</span>  static int getPeriod(Configuration conf) {<a name="line.128"></a>
-<span class="sourceLineNo">129</span>    return conf.getInt(<a name="line.129"></a>
-<span class="sourceLineNo">130</span>        REGION_SIZE_REPORTING_CHORE_PERIOD_KEY, REGION_SIZE_REPORTING_CHORE_PERIOD_DEFAULT);<a name="line.130"></a>
-<span class="sourceLineNo">131</span>  }<a name="line.131"></a>
-<span class="sourceLineNo">132</span><a name="line.132"></a>
-<span class="sourceLineNo">133</span>  /**<a name="line.133"></a>
-<span class="sourceLineNo">134</span>   * Extracts the initial delay for the chore from the configuration.<a name="line.134"></a>
-<span class="sourceLineNo">135</span>   *<a name="line.135"></a>
-<span class="sourceLineNo">136</span>   * @param conf The configuration object.<a name="line.136"></a>
-<span class="sourceLineNo">137</span>   * @return The configured chore initial delay or the default value.<a name="line.137"></a>
-<span class="sourceLineNo">138</span>   */<a name="line.138"></a>
-<span class="sourceLineNo">139</span>  static long getInitialDelay(Configuration conf) {<a name="line.139"></a>
-<span class="sourceLineNo">140</span>    return conf.getLong(<a name="line.140"></a>
-<span class="sourceLineNo">141</span>        REGION_SIZE_REPORTING_CHORE_DELAY_KEY, REGION_SIZE_REPORTING_CHORE_DELAY_DEFAULT);<a name="line.141"></a>
-<span class="sourceLineNo">142</span>  }<a name="line.142"></a>
-<span class="sourceLineNo">143</span><a name="line.143"></a>
-<span class="sourceLineNo">144</span>  /**<a name="line.144"></a>
-<span class="sourceLineNo">145</span>   * Extracts the time unit for the chore period and initial delay from the configuration. The<a name="line.145"></a>
-<span class="sourceLineNo">146</span>   * configuration value for {@link #REGION_SIZE_REPORTING_CHORE_TIMEUNIT_KEY} must correspond to a<a name="line.146"></a>
-<span class="sourceLineNo">147</span>   * {@link TimeUnit} value.<a name="line.147"></a>
-<span class="sourceLineNo">148</span>   *<a name="line.148"></a>
-<span class="sourceLineNo">149</span>   * @param conf The configuration object.<a name="line.149"></a>
-<span class="sourceLineNo">150</span>   * @return The configured time unit for the chore period and initial delay or the default value.<a name="line.150"></a>
-<span class="sourceLineNo">151</span>   */<a name="line.151"></a>
-<span class="sourceLineNo">152</span>  static TimeUnit getTimeUnit(Configuration conf) {<a name="line.152"></a>
-<span class="sourceLineNo">153</span>    return TimeUnit.valueOf(conf.get(REGION_SIZE_REPORTING_CHORE_TIMEUNIT_KEY,<a name="line.153"></a>
-<span class="sourceLineNo">154</span>        REGION_SIZE_REPORTING_CHORE_TIMEUNIT_DEFAULT));<a name="line.154"></a>
-<span class="sourceLineNo">155</span>  }<a name="line.155"></a>
-<span class="sourceLineNo">156</span>}<a name="line.156"></a>
+<span class="sourceLineNo">026</span>import org.apache.hadoop.conf.Configuration;<a name="line.26"></a>
+<span class="sourceLineNo">027</span>import org.apache.hadoop.hbase.ScheduledChore;<a name="line.27"></a>
+<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.regionserver.MetricsRegionServer;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.regionserver.Region;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.regionserver.RegionServerServices;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.32"></a>
+<span class="sourceLineNo">033</span><a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.slf4j.Logger;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.slf4j.LoggerFactory;<a name="line.35"></a>
+<span class="sourceLineNo">036</span><a name="line.36"></a>
+<span class="sourceLineNo">037</span>/**<a name="line.37"></a>
+<span class="sourceLineNo">038</span> * A Chore which sends the region size reports on this RegionServer to the Master.<a name="line.38"></a>
+<span class="sourceLineNo">039</span> */<a name="line.39"></a>
+<span class="sourceLineNo">040</span>@InterfaceAudience.Private<a name="line.40"></a>
+<span class="sourceLineNo">041</span>public class RegionSizeReportingChore extends ScheduledChore {<a name="line.41"></a>
+<span class="sourceLineNo">042</span>  private static final Logger LOG = LoggerFactory.getLogger(RegionSizeReportingChore.class);<a name="line.42"></a>
+<span class="sourceLineNo">043</span><a name="line.43"></a>
+<span class="sourceLineNo">044</span>  static final String REGION_SIZE_REPORTING_CHORE_PERIOD_KEY =<a name="line.44"></a>
+<span class="sourceLineNo">045</span>      "hbase.regionserver.quotas.region.size.reporting.chore.period";<a name="line.45"></a>
+<span class="sourceLineNo">046</span>  static final int REGION_SIZE_REPORTING_CHORE_PERIOD_DEFAULT = 1000 * 60;<a name="line.46"></a>
+<span class="sourceLineNo">047</span><a name="line.47"></a>
+<span class="sourceLineNo">048</span>  static final String REGION_SIZE_REPORTING_CHORE_DELAY_KEY =<a name="line.48"></a>
+<span class="sourceLineNo">049</span>      "hbase.regionserver.quotas.region.size.reporting.chore.delay";<a name="line.49"></a>
+<span class="sourceLineNo">050</span>  static final long REGION_SIZE_REPORTING_CHORE_DELAY_DEFAULT = 1000 * 30;<a name="line.50"></a>
+<span class="sourceLineNo">051</span><a name="line.51"></a>
+<span class="sourceLineNo">052</span>  static final String REGION_SIZE_REPORTING_CHORE_TIMEUNIT_KEY =<a name="line.52"></a>
+<span class="sourceLineNo">053</span>      "hbase.regionserver.quotas.region.size.reporting.chore.timeunit";<a name="line.53"></a>
+<span class="sourceLineNo">054</span>  static final String REGION_SIZE_REPORTING_CHORE_TIMEUNIT_DEFAULT = TimeUnit.MILLISECONDS.name();<a name="line.54"></a>
+<span class="sourceLineNo">055</span><a name="line.55"></a>
+<span class="sourceLineNo">056</span>  private final RegionServerServices rsServices;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>  private final MetricsRegionServer metrics;<a name="line.57"></a>
+<span class="sourceLineNo">058</span><a name="line.58"></a>
+<span class="sourceLineNo">059</span>  public RegionSizeReportingChore(RegionServerServices rsServices) {<a name="line.59"></a>
+<span class="sourceLineNo">060</span>    super(<a name="line.60"></a>
+<span class="sourceLineNo">061</span>        RegionSizeReportingChore.class.getSimpleName(), rsServices,<a name="line.61"></a>
+<span class="sourceLineNo">062</span>        getPeriod(rsServices.getConfiguration()), getInitialDelay(rsServices.getConfiguration()),<a name="line.62"></a>
+<span class="sourceLineNo">063</span>        getTimeUnit(rsServices.getConfiguration()));<a name="line.63"></a>
+<span class="sourceLineNo">064</span>    this.rsServices = rsServices;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>    this.metrics = rsServices.getMetrics();<a name="line.65"></a>
+<span class="sourceLineNo">066</span>  }<a name="line.66"></a>
+<span class="sourceLineNo">067</span><a name="line.67"></a>
+<span class="sourceLineNo">068</span>  @Override<a name="line.68"></a>
+<span class="sourceLineNo">069</span>  protected void chore() {<a name="line.69"></a>
+<span class="sourceLineNo">070</span>    final long start = System.nanoTime();<a name="line.70"></a>
+<span class="sourceLineNo">071</span>    try {<a name="line.71"></a>
+<span class="sourceLineNo">072</span>      _chore();<a name="line.72"></a>
+<span class="sourceLineNo">073</span>    } finally {<a name="line.73"></a>
+<span class="sourceLineNo">074</span>      if (metrics != null) {<a name="line.74"></a>
+<span class="sourceLineNo">075</span>        metrics.incrementRegionSizeReportingChoreTime(<a name="line.75"></a>
+<span class="sourceLineNo">076</span>            TimeUnit.MILLISECONDS.convert(System.nanoTime() - start, TimeUnit.NANOSECONDS));<a name="line.76"></a>
+<span class="sourceLineNo">077</span>      }<a name="line.77"></a>
+<span class="sourceLineNo">078</span>    }<a name="line.78"></a>
+<span class="sourceLineNo">079</span>  }<a name="line.79"></a>
+<span class="sourceLineNo">080</span><a name="line.80"></a>
+<span class="sourceLineNo">081</span>  void _chore() {<a name="line.81"></a>
+<span class="sourceLineNo">082</span>    final RegionServerSpaceQuotaManager quotaManager =<a name="line.82"></a>
+<span class="sourceLineNo">083</span>        rsServices.getRegionServerSpaceQuotaManager();<a name="line.83"></a>
+<span class="sourceLineNo">084</span>    // Get the HRegionInfo for each online region<a name="line.84"></a>
+<span class="sourceLineNo">085</span>    HashSet&lt;RegionInfo&gt; onlineRegionInfos = getOnlineRegionInfos(rsServices.getRegions());<a name="line.85"></a>
+<span class="sourceLineNo">086</span>    RegionSizeStore store = quotaManager.getRegionSizeStore();<a name="line.86"></a>
+<span class="sourceLineNo">087</span>    // Remove all sizes for non-online regions<a name="line.87"></a>
+<span class="sourceLineNo">088</span>    removeNonOnlineRegions(store, onlineRegionInfos);<a name="line.88"></a>
+<span class="sourceLineNo">089</span>    rsServices.reportRegionSizesForQuotas(store);<a name="line.89"></a>
+<span class="sourceLineNo">090</span>  }<a name="line.90"></a>
+<span class="sourceLineNo">091</span><a name="line.91"></a>
+<span class="sourceLineNo">092</span>  HashSet&lt;RegionInfo&gt; getOnlineRegionInfos(List&lt;? extends Region&gt; onlineRegions) {<a name="line.92"></a>
+<span class="sourceLineNo">093</span>    HashSet&lt;RegionInfo&gt; regionInfos = new HashSet&lt;&gt;();<a name="line.93"></a>
+<span class="sourceLineNo">094</span>    onlineRegions.forEach((region) -&gt; regionInfos.add(region.getRegionInfo()));<a name="line.94"></a>
+<span class="sourceLineNo">095</span>    return regionInfos;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>  }<a name="line.96"></a>
+<span class="sourceLineNo">097</span><a name="line.97"></a>
+<span class="sourceLineNo">098</span>  void removeNonOnlineRegions(RegionSizeStore store, Set&lt;RegionInfo&gt; onlineRegions) {<a name="line.98"></a>
+<span class="sourceLineNo">099</span>    // We have to remove regions which are no longer online from the store, otherwise they will<a name="line.99"></a>
+<span class="sourceLineNo">100</span>    // continue to be sent to the Master which will prevent size report expiration.<a name="line.100"></a>
+<span class="sourceLineNo">101</span>    if (onlineRegions.isEmpty()) {<a name="line.101"></a>
+<span class="sourceLineNo">102</span>      // Easy-case, no online regions means no size reports<a name="line.102"></a>
+<span class="sourceLineNo">103</span>      store.clear();<a name="line.103"></a>
+<span class="sourceLineNo">104</span>      return;<a name="line.104"></a>
+<span class="sourceLineNo">105</span>    }<a name="line.105"></a>
+<span class="sourceLineNo">106</span><a name="line.106"></a>
+<span class="sourceLineNo">107</span>    Iterator&lt;Entry&lt;RegionInfo,RegionSize&gt;&gt; iter = store.iterator();<a name="line.107"></a>
+<span class="sourceLineNo">108</span>    int numEntriesRemoved = 0;<a name="line.108"></a>
+<span class="sourceLineNo">109</span>    while (iter.hasNext()) {<a name="line.109"></a>
+<span class="sourceLineNo">110</span>      Entry&lt;RegionInfo,RegionSize&gt; entry = iter.next();<a name="line.110"></a>
+<span class="sourceLineNo">111</span>      RegionInfo regionInfo = entry.getKey();<a name="line.111"></a>
+<span class="sourceLineNo">112</span>      if (!onlineRegions.contains(regionInfo)) {<a name="line.112"></a>
+<span class="sourceLineNo">113</span>        numEntriesRemoved++;<a name="line.113"></a>
+<span class="sourceLineNo">114</span>        iter.remove();<a name="line.114"></a>
+<span class="sourceLineNo">115</span>      }<a name="line.115"></a>
+<span class="sourceLineNo">116</span>    }<a name="line.116"></a>
+<span class="sourceLineNo">117</span>    if (LOG.isTraceEnabled()) {<a name="line.117"></a>
+<span class="sourceLineNo">118</span>      LOG.trace("Removed " + numEntriesRemoved + " region sizes before reporting to Master "<a name="line.118"></a>
+<span class="sourceLineNo">119</span>          + "because they are for non-online regions.");<a name="line.119"></a>
+<span class="sourceLineNo">120</span>    }<a name="line.120"></a>
+<span class="sourceLineNo">121</span>  }<a name="line.121"></a>
+<span class="sourceLineNo">122</span><a name="line.122"></a>
+<span class="sourceLineNo">123</span>  /**<a name="line.123"></a>
+<span class="sourceLineNo">124</span>   * Extracts the period for the chore from the configuration.<a name="line.124"></a>
+<span class="sourceLineNo">125</span>   *<a name="line.125"></a>
+<span class="sourceLineNo">126</span>   * @param conf The configuration object.<a name="line.126"></a>
+<span class="sourceLineNo">127</span>   * @return The configured chore period or the default value.<a name="line.127"></a>
+<span class="sourceLineNo">128</span>   */<a name="line.128"></a>
+<span class="sourceLineNo">129</span>  static int getPeriod(Configuration conf) {<a name="line.129"></a>
+<span class="sourceLineNo">130</span>    return conf.getInt(<a name="line.130"></a>
+<span class="sourceLineNo">131</span>        REGION_SIZE_REPORTING_CHORE_PERIOD_KEY, REGION_SIZE_REPORTING_CHORE_PERIOD_DEFAULT);<a name="line.131"></a>
+<span class="sourceLineNo">132</span>  }<a name="line.132"></a>
+<span class="sourceLineNo">133</span><a name="line.133"></a>
+<span class="sourceLineNo">134</span>  /**<a name="line.134"></a>
+<span class="sourceLineNo">135</span>   * Extracts the initial delay for the chore from the configuration.<a name="line.135"></a>
+<span class="sourceLineNo">136</span>   *<a name="line.136"></a>
+<span class="sourceLineNo">137</span>   * @param conf The configuration object.<a name="line.137"></a>
+<span class="sourceLineNo">138</span>   * @return The configured chore initial delay or the default value.<a name="line.138"></a>
+<span class="sourceLineNo">139</span>   */<a name="line.139"></a>
+<span class="sourceLineNo">140</span>  static long getInitialDelay(Configuration conf) {<a name="line.140"></a>
+<span class="sourceLineNo">141</span>    return conf.getLong(<a name="line.141"></a>
+<span class="sourceLineNo">142</span>        REGION_SIZE_REPORTING_CHORE_DELAY_KEY, REGION_SIZE_REPORTING_CHORE_DELAY_DEFAULT);<a name="line.142"></a>
+<span class="sourceLineNo">143</span>  }<a name="line.143"></a>
+<span class="sourceLineNo">144</span><a name="line.144"></a>
+<span class="sourceLineNo">145</span>  /**<a name="line.145"></a>
+<span class="sourceLineNo">146</span>   * Extracts the time unit for the chore period and initial delay from the configuration. The<a name="line.146"></a>
+<span class="sourceLineNo">147</span>   * configuration value for {@link #REGION_SIZE_REPORTING_CHORE_TIMEUNIT_KEY} must correspond to a<a name="line.147"></a>
+<span class="sourceLineNo">148</span>   * {@link TimeUnit} value.<a name="line.148"></a>
+<span class="sourceLineNo">149</span>   *<a name="line.149"></a>
+<span class="sourceLineNo">150</span>   * @param conf The configuration object.<a name="line.150"></a>
+<span class="sourceLineNo">151</span>   * @return The configured time unit for the chore period and initial delay or the default value.<a name="line.151"></a>
+<span class="sourceLineNo">152</span>   */<a name="line.152"></a>
+<span class="sourceLineNo">153</span>  static TimeUnit getTimeUnit(Configuration conf) {<a name="line.153"></a>
+<span class="sourceLineNo">154</span>    return TimeUnit.valueOf(conf.get(REGION_SIZE_REPORTING_CHORE_TIMEUNIT_KEY,<a name="line.154"></a>
+<span class="sourceLineNo">155</span>        REGION_SIZE_REPORTING_CHORE_TIMEUNIT_DEFAULT));<a name="line.155"></a>
+<span class="sourceLineNo">156</span>  }<a name="line.156"></a>
+<span class="sourceLineNo">157</span>}<a name="line.157"></a>
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html b/devapidocs/src-html/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html
index d096ac4..2b1c376 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html
@@ -28,89 +28,90 @@
 <span class="sourceLineNo">020</span>import java.util.Map.Entry;<a name="line.20"></a>
 <span class="sourceLineNo">021</span>import java.util.concurrent.ConcurrentHashMap;<a name="line.21"></a>
 <span class="sourceLineNo">022</span><a name="line.22"></a>
-<span class="sourceLineNo">023</span>import org.apache.commons.logging.Log;<a name="line.23"></a>
-<span class="sourceLineNo">024</span>import org.apache.commons.logging.LogFactory;<a name="line.24"></a>
-<span class="sourceLineNo">025</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.25"></a>
-<span class="sourceLineNo">026</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.28"></a>
-<span class="sourceLineNo">029</span><a name="line.29"></a>
-<span class="sourceLineNo">030</span>/**<a name="line.30"></a>
-<span class="sourceLineNo">031</span> * A {@link RegionSizeStore} implementation backed by a ConcurrentHashMap. We expected similar<a name="line.31"></a>
-<span class="sourceLineNo">032</span> * amounts of reads and writes to the "store", so using a RWLock is not going to provide any<a name="line.32"></a>
-<span class="sourceLineNo">033</span> * exceptional gains.<a name="line.33"></a>
-<span class="sourceLineNo">034</span> */<a name="line.34"></a>
-<span class="sourceLineNo">035</span>@InterfaceAudience.Private<a name="line.35"></a>
-<span class="sourceLineNo">036</span>public class RegionSizeStoreImpl implements RegionSizeStore {<a name="line.36"></a>
-<span class="sourceLineNo">037</span>  private static final Log LOG = LogFactory.getLog(RegionSizeStoreImpl.class);<a name="line.37"></a>
-<span class="sourceLineNo">038</span>  private static final long sizeOfEntry = ClassSize.align(<a name="line.38"></a>
-<span class="sourceLineNo">039</span>      ClassSize.CONCURRENT_HASHMAP_ENTRY<a name="line.39"></a>
-<span class="sourceLineNo">040</span>      + ClassSize.OBJECT + Bytes.SIZEOF_LONG<a name="line.40"></a>
-<span class="sourceLineNo">041</span>      // TODO Have RegionInfo implement HeapSize. 100B is an approximation based on a heapdump.<a name="line.41"></a>
-<span class="sourceLineNo">042</span>      + ClassSize.OBJECT + 100);<a name="line.42"></a>
-<span class="sourceLineNo">043</span>  private final ConcurrentHashMap&lt;RegionInfo,RegionSize&gt; store;<a name="line.43"></a>
-<span class="sourceLineNo">044</span><a name="line.44"></a>
-<span class="sourceLineNo">045</span>  public RegionSizeStoreImpl() {<a name="line.45"></a>
-<span class="sourceLineNo">046</span>    store = new ConcurrentHashMap&lt;&gt;();<a name="line.46"></a>
-<span class="sourceLineNo">047</span>  }<a name="line.47"></a>
-<span class="sourceLineNo">048</span><a name="line.48"></a>
-<span class="sourceLineNo">049</span>  @Override<a name="line.49"></a>
-<span class="sourceLineNo">050</span>  public Iterator&lt;Entry&lt;RegionInfo,RegionSize&gt;&gt; iterator() {<a name="line.50"></a>
-<span class="sourceLineNo">051</span>    return store.entrySet().iterator();<a name="line.51"></a>
-<span class="sourceLineNo">052</span>  }<a name="line.52"></a>
-<span class="sourceLineNo">053</span><a name="line.53"></a>
-<span class="sourceLineNo">054</span>  @Override<a name="line.54"></a>
-<span class="sourceLineNo">055</span>  public RegionSize getRegionSize(RegionInfo regionInfo) {<a name="line.55"></a>
-<span class="sourceLineNo">056</span>    return store.get(regionInfo);<a name="line.56"></a>
-<span class="sourceLineNo">057</span>  }<a name="line.57"></a>
-<span class="sourceLineNo">058</span><a name="line.58"></a>
-<span class="sourceLineNo">059</span>  @Override<a name="line.59"></a>
-<span class="sourceLineNo">060</span>  public void put(RegionInfo regionInfo, long size) {<a name="line.60"></a>
-<span class="sourceLineNo">061</span>    if (LOG.isTraceEnabled()) {<a name="line.61"></a>
-<span class="sourceLineNo">062</span>      LOG.trace("Setting space quota size for " + regionInfo + " to " + size);<a name="line.62"></a>
-<span class="sourceLineNo">063</span>    }<a name="line.63"></a>
-<span class="sourceLineNo">064</span>    // Atomic. Either sets the new size for the first time, or replaces the existing value.<a name="line.64"></a>
-<span class="sourceLineNo">065</span>    store.compute(regionInfo,<a name="line.65"></a>
-<span class="sourceLineNo">066</span>      (key,value) -&gt; value == null ? new RegionSizeImpl(size) : value.setSize(size));<a name="line.66"></a>
-<span class="sourceLineNo">067</span>  }<a name="line.67"></a>
-<span class="sourceLineNo">068</span><a name="line.68"></a>
-<span class="sourceLineNo">069</span>  @Override<a name="line.69"></a>
-<span class="sourceLineNo">070</span>  public void incrementRegionSize(RegionInfo regionInfo, long delta) {<a name="line.70"></a>
-<span class="sourceLineNo">071</span>    if (LOG.isTraceEnabled()) {<a name="line.71"></a>
-<span class="sourceLineNo">072</span>      LOG.trace("Updating space quota size for " + regionInfo + " with a delta of " + delta);<a name="line.72"></a>
-<span class="sourceLineNo">073</span>    }<a name="line.73"></a>
-<span class="sourceLineNo">074</span>    // Atomic. Recomputes the stored value with the delta if there is one, otherwise use the delta.<a name="line.74"></a>
-<span class="sourceLineNo">075</span>    store.compute(regionInfo,<a name="line.75"></a>
-<span class="sourceLineNo">076</span>      (key,value) -&gt; value == null ? new RegionSizeImpl(delta) : value.incrementSize(delta));<a name="line.76"></a>
-<span class="sourceLineNo">077</span>  }<a name="line.77"></a>
-<span class="sourceLineNo">078</span><a name="line.78"></a>
-<span class="sourceLineNo">079</span>  @Override<a name="line.79"></a>
-<span class="sourceLineNo">080</span>  public RegionSize remove(RegionInfo regionInfo) {<a name="line.80"></a>
-<span class="sourceLineNo">081</span>    return store.remove(regionInfo);<a name="line.81"></a>
-<span class="sourceLineNo">082</span>  }<a name="line.82"></a>
-<span class="sourceLineNo">083</span><a name="line.83"></a>
-<span class="sourceLineNo">084</span>  @Override<a name="line.84"></a>
-<span class="sourceLineNo">085</span>  public long heapSize() {<a name="line.85"></a>
-<span class="sourceLineNo">086</span>    // Will have to iterate over each element if RegionInfo implements HeapSize, for now it's just<a name="line.86"></a>
-<span class="sourceLineNo">087</span>    // a simple calculation.<a name="line.87"></a>
-<span class="sourceLineNo">088</span>    return sizeOfEntry * store.size();<a name="line.88"></a>
-<span class="sourceLineNo">089</span>  }<a name="line.89"></a>
-<span class="sourceLineNo">090</span><a name="line.90"></a>
-<span class="sourceLineNo">091</span>  @Override<a name="line.91"></a>
-<span class="sourceLineNo">092</span>  public int size() {<a name="line.92"></a>
-<span class="sourceLineNo">093</span>    return store.size();<a name="line.93"></a>
-<span class="sourceLineNo">094</span>  }<a name="line.94"></a>
-<span class="sourceLineNo">095</span><a name="line.95"></a>
-<span class="sourceLineNo">096</span>  @Override<a name="line.96"></a>
-<span class="sourceLineNo">097</span>  public boolean isEmpty() {<a name="line.97"></a>
-<span class="sourceLineNo">098</span>    return store.isEmpty();<a name="line.98"></a>
-<span class="sourceLineNo">099</span>  }<a name="line.99"></a>
-<span class="sourceLineNo">100</span><a name="line.100"></a>
-<span class="sourceLineNo">101</span>  @Override<a name="line.101"></a>
-<span class="sourceLineNo">102</span>  public void clear() {<a name="line.102"></a>
-<span class="sourceLineNo">103</span>    store.clear();<a name="line.103"></a>
-<span class="sourceLineNo">104</span>  }<a name="line.104"></a>
-<span class="sourceLineNo">105</span>}<a name="line.105"></a>
+<span class="sourceLineNo">023</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.23"></a>
+<span class="sourceLineNo">024</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.24"></a>
+<span class="sourceLineNo">025</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.25"></a>
+<span class="sourceLineNo">026</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.26"></a>
+<span class="sourceLineNo">027</span><a name="line.27"></a>
+<span class="sourceLineNo">028</span>import org.slf4j.Logger;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.slf4j.LoggerFactory;<a name="line.29"></a>
+<span class="sourceLineNo">030</span><a name="line.30"></a>
+<span class="sourceLineNo">031</span>/**<a name="line.31"></a>
+<span class="sourceLineNo">032</span> * A {@link RegionSizeStore} implementation backed by a ConcurrentHashMap. We expected similar<a name="line.32"></a>
+<span class="sourceLineNo">033</span> * amounts of reads and writes to the "store", so using a RWLock is not going to provide any<a name="line.33"></a>
+<span class="sourceLineNo">034</span> * exceptional gains.<a name="line.34"></a>
+<span class="sourceLineNo">035</span> */<a name="line.35"></a>
+<span class="sourceLineNo">036</span>@InterfaceAudience.Private<a name="line.36"></a>
+<span class="sourceLineNo">037</span>public class RegionSizeStoreImpl implements RegionSizeStore {<a name="line.37"></a>
+<span class="sourceLineNo">038</span>  private static final Logger LOG = LoggerFactory.getLogger(RegionSizeStoreImpl.class);<a name="line.38"></a>
+<span class="sourceLineNo">039</span>  private static final long sizeOfEntry = ClassSize.align(<a name="line.39"></a>
+<span class="sourceLineNo">040</span>      ClassSize.CONCURRENT_HASHMAP_ENTRY<a name="line.40"></a>
+<span class="sourceLineNo">041</span>      + ClassSize.OBJECT + Bytes.SIZEOF_LONG<a name="line.41"></a>
+<span class="sourceLineNo">042</span>      // TODO Have RegionInfo implement HeapSize. 100B is an approximation based on a heapdump.<a name="line.42"></a>
+<span class="sourceLineNo">043</span>      + ClassSize.OBJECT + 100);<a name="line.43"></a>
+<span class="sourceLineNo">044</span>  private final ConcurrentHashMap&lt;RegionInfo,RegionSize&gt; store;<a name="line.44"></a>
+<span class="sourceLineNo">045</span><a name="line.45"></a>
+<span class="sourceLineNo">046</span>  public RegionSizeStoreImpl() {<a name="line.46"></a>
+<span class="sourceLineNo">047</span>    store = new ConcurrentHashMap&lt;&gt;();<a name="line.47"></a>
+<span class="sourceLineNo">048</span>  }<a name="line.48"></a>
+<span class="sourceLineNo">049</span><a name="line.49"></a>
+<span class="sourceLineNo">050</span>  @Override<a name="line.50"></a>
+<span class="sourceLineNo">051</span>  public Iterator&lt;Entry&lt;RegionInfo,RegionSize&gt;&gt; iterator() {<a name="line.51"></a>
+<span class="sourceLineNo">052</span>    return store.entrySet().iterator();<a name="line.52"></a>
+<span class="sourceLineNo">053</span>  }<a name="line.53"></a>
+<span class="sourceLineNo">054</span><a name="line.54"></a>
+<span class="sourceLineNo">055</span>  @Override<a name="line.55"></a>
+<span class="sourceLineNo">056</span>  public RegionSize getRegionSize(RegionInfo regionInfo) {<a name="line.56"></a>
+<span class="sourceLineNo">057</span>    return store.get(regionInfo);<a name="line.57"></a>
+<span class="sourceLineNo">058</span>  }<a name="line.58"></a>
+<span class="sourceLineNo">059</span><a name="line.59"></a>
+<span class="sourceLineNo">060</span>  @Override<a name="line.60"></a>
+<span class="sourceLineNo">061</span>  public void put(RegionInfo regionInfo, long size) {<a name="line.61"></a>
+<span class="sourceLineNo">062</span>    if (LOG.isTraceEnabled()) {<a name="line.62"></a>
+<span class="sourceLineNo">063</span>      LOG.trace("Setting space quota size for " + regionInfo + " to " + size);<a name="line.63"></a>
+<span class="sourceLineNo">064</span>    }<a name="line.64"></a>
+<span class="sourceLineNo">065</span>    // Atomic. Either sets the new size for the first time, or replaces the existing value.<a name="line.65"></a>
+<span class="sourceLineNo">066</span>    store.compute(regionInfo,<a name="line.66"></a>
+<span class="sourceLineNo">067</span>      (key,value) -&gt; value == null ? new RegionSizeImpl(size) : value.setSize(size));<a name="line.67"></a>
+<span class="sourceLineNo">068</span>  }<a name="line.68"></a>
+<span class="sourceLineNo">069</span><a name="line.69"></a>
+<span class="sourceLineNo">070</span>  @Override<a name="line.70"></a>
+<span class="sourceLineNo">071</span>  public void incrementRegionSize(RegionInfo regionInfo, long delta) {<a name="line.71"></a>
+<span class="sourceLineNo">072</span>    if (LOG.isTraceEnabled()) {<a name="line.72"></a>
+<span class="sourceLineNo">073</span>      LOG.trace("Updating space quota size for " + regionInfo + " with a delta of " + delta);<a name="line.73"></a>
+<span class="sourceLineNo">074</span>    }<a name="line.74"></a>
+<span class="sourceLineNo">075</span>    // Atomic. Recomputes the stored value with the delta if there is one, otherwise use the delta.<a name="line.75"></a>
+<span class="sourceLineNo">076</span>    store.compute(regionInfo,<a name="line.76"></a>
+<span class="sourceLineNo">077</span>      (key,value) -&gt; value == null ? new RegionSizeImpl(delta) : value.incrementSize(delta));<a name="line.77"></a>
+<span class="sourceLineNo">078</span>  }<a name="line.78"></a>
+<span class="sourceLineNo">079</span><a name="line.79"></a>
+<span class="sourceLineNo">080</span>  @Override<a name="line.80"></a>
+<span class="sourceLineNo">081</span>  public RegionSize remove(RegionInfo regionInfo) {<a name="line.81"></a>
+<span class="sourceLineNo">082</span>    return store.remove(regionInfo);<a name="line.82"></a>
+<span class="sourceLineNo">083</span>  }<a name="line.83"></a>
+<span class="sourceLineNo">084</span><a name="line.84"></a>
+<span class="sourceLineNo">085</span>  @Override<a name="line.85"></a>
+<span class="sourceLineNo">086</span>  public long heapSize() {<a name="line.86"></a>
+<span class="sourceLineNo">087</span>    // Will have to iterate over each element if RegionInfo implements HeapSize, for now it's just<a name="line.87"></a>
+<span class="sourceLineNo">088</span>    // a simple calculation.<a name="line.88"></a>
+<span class="sourceLineNo">089</span>    return sizeOfEntry * store.size();<a name="line.89"></a>
+<span class="sourceLineNo">090</span>  }<a name="line.90"></a>
+<span class="sourceLineNo">091</span><a name="line.91"></a>
+<span class="sourceLineNo">092</span>  @Override<a name="line.92"></a>
+<span class="sourceLineNo">093</span>  public int size() {<a name="line.93"></a>
+<span class="sourceLineNo">094</span>    return store.size();<a name="line.94"></a>
+<span class="sourceLineNo">095</span>  }<a name="line.95"></a>
+<span class="sourceLineNo">096</span><a name="line.96"></a>
+<span class="sourceLineNo">097</span>  @Override<a name="line.97"></a>
+<span class="sourceLineNo">098</span>  public boolean isEmpty() {<a name="line.98"></a>
+<span class="sourceLineNo">099</span>    return store.isEmpty();<a name="line.99"></a>
+<span class="sourceLineNo">100</span>  }<a name="line.100"></a>
+<span class="sourceLineNo">101</span><a name="line.101"></a>
+<span class="sourceLineNo">102</span>  @Override<a name="line.102"></a>
+<span class="sourceLineNo">103</span>  public void clear() {<a name="line.103"></a>
+<span class="sourceLineNo">104</span>    store.clear();<a name="line.104"></a>
+<span class="sourceLineNo">105</span>  }<a name="line.105"></a>
+<span class="sourceLineNo">106</span>}<a name="line.106"></a>
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.Visitor.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.Visitor.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.Visitor.html
index 7137829..4a879bb 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.Visitor.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.Visitor.html
@@ -78,124 +78,124 @@
 <span class="sourceLineNo">070</span>import java.util.concurrent.locks.ReadWriteLock;<a name="line.70"></a>
 <span class="sourceLineNo">071</span>import java.util.concurrent.locks.ReentrantReadWriteLock;<a name="line.71"></a>
 <span class="sourceLineNo">072</span>import java.util.function.Function;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.commons.collections.CollectionUtils;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.conf.Configuration;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.fs.FileStatus;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.fs.FileSystem;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.fs.LocatedFileStatus;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.fs.Path;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.Cell;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.CellBuilderType;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.CellComparatorImpl;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.CompoundConfiguration;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.HConstants;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.HConstants.OperationStatusCode;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.92"></a>
-<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.93"></a>
-<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.NamespaceDescriptor;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.97"></a>
-<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.98"></a>
-<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.TableName;<a name="line.99"></a>
-<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.Tag;<a name="line.100"></a>
-<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.TagUtil;<a name="line.101"></a>
-<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.102"></a>
-<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.client.Append;<a name="line.103"></a>
-<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;<a name="line.104"></a>
-<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.client.CompactionState;<a name="line.105"></a>
-<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.106"></a>
-<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.107"></a>
-<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.client.Get;<a name="line.108"></a>
-<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.109"></a>
-<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.client.IsolationLevel;<a name="line.110"></a>
-<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.111"></a>
-<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;<a name="line.112"></a>
-<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.client.Put;<a name="line.113"></a>
-<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.client.RegionInfoBuilder;<a name="line.115"></a>
-<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.116"></a>
-<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.client.Result;<a name="line.117"></a>
-<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.118"></a>
-<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.119"></a>
-<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.120"></a>
-<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.client.TableDescriptorBuilder;<a name="line.121"></a>
-<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.conf.ConfigurationManager;<a name="line.122"></a>
-<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;<a name="line.123"></a>
-<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;<a name="line.124"></a>
-<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;<a name="line.125"></a>
-<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.126"></a>
-<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.exceptions.TimeoutIOException;<a name="line.127"></a>
-<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.128"></a>
-<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.129"></a>
-<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.filter.FilterWrapper;<a name="line.130"></a>
-<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.filter.IncompatibleFilterException;<a name="line.131"></a>
-<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.132"></a>
-<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.133"></a>
-<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.io.hfile.HFile;<a name="line.135"></a>
-<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;<a name="line.136"></a>
-<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;<a name="line.137"></a>
-<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.ipc.RpcCall;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.142"></a>
-<span class="sourceLineNo">143</span>import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.144"></a>
-<span class="sourceLineNo">145</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>import org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>import org.apache.hadoop.hbase.regionserver.throttle.StoreHotnessProtector;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>import org.apache.hadoop.hbase.regionserver.wal.WALUtil;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>import org.apache.hadoop.hbase.security.User;<a name="line.153"></a>
-<span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.154"></a>
-<span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.155"></a>
-<span class="sourceLineNo">156</span>import org.apache.hadoop.hbase.trace.TraceUtil;<a name="line.156"></a>
-<span class="sourceLineNo">157</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.157"></a>
-<span class="sourceLineNo">158</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.158"></a>
-<span class="sourceLineNo">159</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.159"></a>
-<span class="sourceLineNo">160</span>import org.apache.hadoop.hbase.util.CompressionTest;<a name="line.160"></a>
-<span class="sourceLineNo">161</span>import org.apache.hadoop.hbase.util.EncryptionTest;<a name="line.161"></a>
-<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.162"></a>
-<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.163"></a>
-<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.util.HashedBytes;<a name="line.164"></a>
-<span class="sourceLineNo">165</span>import org.apache.hadoop.hbase.util.NonceKey;<a name="line.165"></a>
-<span class="sourceLineNo">166</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.166"></a>
-<span class="sourceLineNo">167</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.167"></a>
-<span class="sourceLineNo">168</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.168"></a>
-<span class="sourceLineNo">169</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.169"></a>
-<span class="sourceLineNo">170</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.170"></a>
-<span class="sourceLineNo">171</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.171"></a>
-<span class="sourceLineNo">172</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.172"></a>
-<span class="sourceLineNo">173</span>import org.apache.hadoop.hbase.wal.WALKeyImpl;<a name="line.173"></a>
-<span class="sourceLineNo">174</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.174"></a>
-<span class="sourceLineNo">175</span>import org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;<a name="line.175"></a>
-<span class="sourceLineNo">176</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.176"></a>
-<span class="sourceLineNo">177</span>import org.apache.hadoop.util.StringUtils;<a name="line.177"></a>
-<span class="sourceLineNo">178</span>import org.apache.htrace.core.TraceScope;<a name="line.178"></a>
-<span class="sourceLineNo">179</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.179"></a>
-<span class="sourceLineNo">180</span>import org.slf4j.Logger;<a name="line.180"></a>
-<span class="sourceLineNo">181</span>import org.slf4j.LoggerFactory;<a name="line.181"></a>
-<span class="sourceLineNo">182</span><a name="line.182"></a>
-<span class="sourceLineNo">183</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.183"></a>
-<span class="sourceLineNo">184</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.184"></a>
-<span class="sourceLineNo">185</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.185"></a>
-<span class="sourceLineNo">186</span>import org.apache.hbase.thirdparty.com.google.common.collect.Maps;<a name="line.186"></a>
-<span class="sourceLineNo">187</span>import org.apache.hbase.thirdparty.com.google.common.io.Closeables;<a name="line.187"></a>
-<span class="sourceLineNo">188</span>import org.apache.hbase.thirdparty.com.google.protobuf.Service;<a name="line.188"></a>
-<span class="sourceLineNo">189</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.189"></a>
-<span class="sourceLineNo">190</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.190"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.conf.Configuration;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.fs.FileStatus;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.fs.FileSystem;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.fs.LocatedFileStatus;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.fs.Path;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.Cell;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.CellBuilderType;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.CellComparator;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.CellComparatorImpl;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.CompareOperator;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.CompoundConfiguration;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.DoNotRetryIOException;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.DroppedSnapshotException;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.apache.hadoop.hbase.HConstants;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.apache.hadoop.hbase.HConstants.OperationStatusCode;<a name="line.90"></a>
+<span class="sourceLineNo">091</span>import org.apache.hadoop.hbase.HDFSBlocksDistribution;<a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hadoop.hbase.KeyValue;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hadoop.hbase.KeyValueUtil;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hadoop.hbase.NamespaceDescriptor;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hadoop.hbase.NotServingRegionException;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>import org.apache.hadoop.hbase.PrivateCellUtil;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hadoop.hbase.RegionTooBusyException;<a name="line.97"></a>
+<span class="sourceLineNo">098</span>import org.apache.hadoop.hbase.TableName;<a name="line.98"></a>
+<span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.Tag;<a name="line.99"></a>
+<span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.TagUtil;<a name="line.100"></a>
+<span class="sourceLineNo">101</span>import org.apache.hadoop.hbase.UnknownScannerException;<a name="line.101"></a>
+<span class="sourceLineNo">102</span>import org.apache.hadoop.hbase.client.Append;<a name="line.102"></a>
+<span class="sourceLineNo">103</span>import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;<a name="line.103"></a>
+<span class="sourceLineNo">104</span>import org.apache.hadoop.hbase.client.CompactionState;<a name="line.104"></a>
+<span class="sourceLineNo">105</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.105"></a>
+<span class="sourceLineNo">106</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.106"></a>
+<span class="sourceLineNo">107</span>import org.apache.hadoop.hbase.client.Get;<a name="line.107"></a>
+<span class="sourceLineNo">108</span>import org.apache.hadoop.hbase.client.Increment;<a name="line.108"></a>
+<span class="sourceLineNo">109</span>import org.apache.hadoop.hbase.client.IsolationLevel;<a name="line.109"></a>
+<span class="sourceLineNo">110</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.110"></a>
+<span class="sourceLineNo">111</span>import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;<a name="line.111"></a>
+<span class="sourceLineNo">112</span>import org.apache.hadoop.hbase.client.Put;<a name="line.112"></a>
+<span class="sourceLineNo">113</span>import org.apache.hadoop.hbase.client.RegionInfo;<a name="line.113"></a>
+<span class="sourceLineNo">114</span>import org.apache.hadoop.hbase.client.RegionInfoBuilder;<a name="line.114"></a>
+<span class="sourceLineNo">115</span>import org.apache.hadoop.hbase.client.RegionReplicaUtil;<a name="line.115"></a>
+<span class="sourceLineNo">116</span>import org.apache.hadoop.hbase.client.Result;<a name="line.116"></a>
+<span class="sourceLineNo">117</span>import org.apache.hadoop.hbase.client.RowMutations;<a name="line.117"></a>
+<span class="sourceLineNo">118</span>import org.apache.hadoop.hbase.client.Scan;<a name="line.118"></a>
+<span class="sourceLineNo">119</span>import org.apache.hadoop.hbase.client.TableDescriptor;<a name="line.119"></a>
+<span class="sourceLineNo">120</span>import org.apache.hadoop.hbase.client.TableDescriptorBuilder;<a name="line.120"></a>
+<span class="sourceLineNo">121</span>import org.apache.hadoop.hbase.conf.ConfigurationManager;<a name="line.121"></a>
+<span class="sourceLineNo">122</span>import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;<a name="line.122"></a>
+<span class="sourceLineNo">123</span>import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;<a name="line.124"></a>
+<span class="sourceLineNo">125</span>import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;<a name="line.125"></a>
+<span class="sourceLineNo">126</span>import org.apache.hadoop.hbase.exceptions.TimeoutIOException;<a name="line.126"></a>
+<span class="sourceLineNo">127</span>import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;<a name="line.127"></a>
+<span class="sourceLineNo">128</span>import org.apache.hadoop.hbase.filter.ByteArrayComparable;<a name="line.128"></a>
+<span class="sourceLineNo">129</span>import org.apache.hadoop.hbase.filter.FilterWrapper;<a name="line.129"></a>
+<span class="sourceLineNo">130</span>import org.apache.hadoop.hbase.filter.IncompatibleFilterException;<a name="line.130"></a>
+<span class="sourceLineNo">131</span>import org.apache.hadoop.hbase.io.HFileLink;<a name="line.131"></a>
+<span class="sourceLineNo">132</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.132"></a>
+<span class="sourceLineNo">133</span>import org.apache.hadoop.hbase.io.TimeRange;<a name="line.133"></a>
+<span class="sourceLineNo">134</span>import org.apache.hadoop.hbase.io.hfile.HFile;<a name="line.134"></a>
+<span class="sourceLineNo">135</span>import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;<a name="line.135"></a>
+<span class="sourceLineNo">136</span>import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;<a name="line.136"></a>
+<span class="sourceLineNo">137</span>import org.apache.hadoop.hbase.ipc.RpcCall;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>import org.apache.hadoop.hbase.ipc.RpcServer;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;<a name="line.141"></a>
+<span class="sourceLineNo">142</span>import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;<a name="line.143"></a>
+<span class="sourceLineNo">144</span>import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;<a name="line.144"></a>
+<span class="sourceLineNo">145</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;<a name="line.145"></a>
+<span class="sourceLineNo">146</span>import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>import org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>import org.apache.hadoop.hbase.regionserver.throttle.StoreHotnessProtector;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>import org.apache.hadoop.hbase.regionserver.wal.WALUtil;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>import org.apache.hadoop.hbase.security.User;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>import org.apache.hadoop.hbase.snapshot.SnapshotManifest;<a name="line.154"></a>
+<span class="sourceLineNo">155</span>import org.apache.hadoop.hbase.trace.TraceUtil;<a name="line.155"></a>
+<span class="sourceLineNo">156</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.156"></a>
+<span class="sourceLineNo">157</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.157"></a>
+<span class="sourceLineNo">158</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.158"></a>
+<span class="sourceLineNo">159</span>import org.apache.hadoop.hbase.util.CompressionTest;<a name="line.159"></a>
+<span class="sourceLineNo">160</span>import org.apache.hadoop.hbase.util.EncryptionTest;<a name="line.160"></a>
+<span class="sourceLineNo">161</span>import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;<a name="line.161"></a>
+<span class="sourceLineNo">162</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.162"></a>
+<span class="sourceLineNo">163</span>import org.apache.hadoop.hbase.util.HashedBytes;<a name="line.163"></a>
+<span class="sourceLineNo">164</span>import org.apache.hadoop.hbase.util.NonceKey;<a name="line.164"></a>
+<span class="sourceLineNo">165</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.165"></a>
+<span class="sourceLineNo">166</span>import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;<a name="line.166"></a>
+<span class="sourceLineNo">167</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.167"></a>
+<span class="sourceLineNo">168</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.168"></a>
+<span class="sourceLineNo">169</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.169"></a>
+<span class="sourceLineNo">170</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.170"></a>
+<span class="sourceLineNo">171</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.171"></a>
+<span class="sourceLineNo">172</span>import org.apache.hadoop.hbase.wal.WALKeyImpl;<a name="line.172"></a>
+<span class="sourceLineNo">173</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.173"></a>
+<span class="sourceLineNo">174</span>import org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;<a name="line.174"></a>
+<span class="sourceLineNo">175</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.175"></a>
+<span class="sourceLineNo">176</span>import org.apache.hadoop.util.StringUtils;<a name="line.176"></a>
+<span class="sourceLineNo">177</span>import org.apache.htrace.core.TraceScope;<a name="line.177"></a>
+<span class="sourceLineNo">178</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.178"></a>
+<span class="sourceLineNo">179</span>import org.slf4j.Logger;<a name="line.179"></a>
+<span class="sourceLineNo">180</span>import org.slf4j.LoggerFactory;<a name="line.180"></a>
+<span class="sourceLineNo">181</span><a name="line.181"></a>
+<span class="sourceLineNo">182</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.182"></a>
+<span class="sourceLineNo">183</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.183"></a>
+<span class="sourceLineNo">184</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.184"></a>
+<span class="sourceLineNo">185</span>import org.apache.hbase.thirdparty.com.google.common.collect.Maps;<a name="line.185"></a>
+<span class="sourceLineNo">186</span>import org.apache.hbase.thirdparty.com.google.common.io.Closeables;<a name="line.186"></a>
+<span class="sourceLineNo">187</span>import org.apache.hbase.thirdparty.com.google.protobuf.Service;<a name="line.187"></a>
+<span class="sourceLineNo">188</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.188"></a>
+<span class="sourceLineNo">189</span>import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;<a name="line.189"></a>
+<span class="sourceLineNo">190</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.190"></a>
 <span class="sourceLineNo">191</span><a name="line.191"></a>
 <span class="sourceLineNo">192</span>import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;<a name="line.192"></a>
 <span class="sourceLineNo">193</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;<a name="line.193"></a>


[06/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.CorruptedLogFileException.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.CorruptedLogFileException.html b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.CorruptedLogFileException.html
index 9f6a1bd..99f53c4 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.CorruptedLogFileException.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.CorruptedLogFileException.html
@@ -54,55 +54,55 @@
 <span class="sourceLineNo">046</span>import java.util.concurrent.atomic.AtomicReference;<a name="line.46"></a>
 <span class="sourceLineNo">047</span>import java.util.regex.Matcher;<a name="line.47"></a>
 <span class="sourceLineNo">048</span>import java.util.regex.Pattern;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.commons.collections.CollectionUtils;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.commons.collections.MapUtils;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.commons.lang3.ArrayUtils;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.conf.Configuration;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.FileAlreadyExistsException;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.FileStatus;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.FileSystem;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.fs.Path;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.fs.PathFilter;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.Cell;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.HConstants;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.TableName;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.client.Put;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.log.HBaseMarkers;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.master.SplitLogManager;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.regionserver.LastSequenceId;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.wal.WAL.Entry;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.wal.WAL.Reader;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.wal.WALProvider.Writer;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.slf4j.Logger;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.slf4j.LoggerFactory;<a name="line.92"></a>
-<span class="sourceLineNo">093</span><a name="line.93"></a>
-<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.97"></a>
+<span class="sourceLineNo">049</span>import org.apache.commons.lang3.ArrayUtils;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.conf.Configuration;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.fs.FileAlreadyExistsException;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.fs.FileStatus;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.FileSystem;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.Path;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.PathFilter;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.Cell;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.HConstants;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.TableName;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.client.Put;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.log.HBaseMarkers;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.master.SplitLogManager;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.regionserver.LastSequenceId;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.wal.WAL.Entry;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.wal.WAL.Reader;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.wal.WALProvider.Writer;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.slf4j.Logger;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.slf4j.LoggerFactory;<a name="line.90"></a>
+<span class="sourceLineNo">091</span><a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.MapUtils;<a name="line.97"></a>
 <span class="sourceLineNo">098</span><a name="line.98"></a>
 <span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry;<a name="line.99"></a>
 <span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType;<a name="line.100"></a>
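
Each hunk in this commit applies the same import cleanup to another generated view of WALSplitter.java: the unshaded commons-collections 3 utilities (org.apache.commons.collections.CollectionUtils and MapUtils) are replaced by their collections4 equivalents relocated under org.apache.hbase.thirdparty, and the import block is regrouped to match. A minimal sketch of what a call site looks like after the switch (hypothetical class and method names, not taken from WALSplitter itself; the collections4 static-utility API is unchanged by the relocation, only the package prefix differs):

    import java.util.List;
    import java.util.Map;

    // Relocated collections4 from hbase-thirdparty replaces the old
    // org.apache.commons.collections.{CollectionUtils,MapUtils} imports.
    import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;
    import org.apache.hbase.thirdparty.org.apache.commons.collections4.MapUtils;

    public class ShadedCollectionsSketch {
      // True only when there are logs to split and known last sequence ids;
      // isEmpty() on both utility classes is null-safe, as in plain collections4.
      static boolean hasWork(List<String> logPaths, Map<String, Long> lastSeqIds) {
        return !CollectionUtils.isEmpty(logPaths) && !MapUtils.isEmpty(lastSeqIds);
      }
    }

Relocating the dependency this way keeps HBase's bundled copy of collections4 off the application classpath, so downstream users remain free to ship their own commons-collections version without conflict.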

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.EntryBuffers.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.EntryBuffers.html b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.EntryBuffers.html
index 9f6a1bd..99f53c4 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.EntryBuffers.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.EntryBuffers.html
@@ -54,55 +54,55 @@
 <span class="sourceLineNo">046</span>import java.util.concurrent.atomic.AtomicReference;<a name="line.46"></a>
 <span class="sourceLineNo">047</span>import java.util.regex.Matcher;<a name="line.47"></a>
 <span class="sourceLineNo">048</span>import java.util.regex.Pattern;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.commons.collections.CollectionUtils;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.commons.collections.MapUtils;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.commons.lang3.ArrayUtils;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.conf.Configuration;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.FileAlreadyExistsException;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.FileStatus;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.FileSystem;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.fs.Path;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.fs.PathFilter;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.Cell;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.HConstants;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.TableName;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.client.Put;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.log.HBaseMarkers;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.master.SplitLogManager;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.regionserver.LastSequenceId;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.wal.WAL.Entry;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.wal.WAL.Reader;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.wal.WALProvider.Writer;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.slf4j.Logger;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.slf4j.LoggerFactory;<a name="line.92"></a>
-<span class="sourceLineNo">093</span><a name="line.93"></a>
-<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.97"></a>
+<span class="sourceLineNo">049</span>import org.apache.commons.lang3.ArrayUtils;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.conf.Configuration;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.fs.FileAlreadyExistsException;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.fs.FileStatus;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.FileSystem;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.Path;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.PathFilter;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.Cell;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.HConstants;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.TableName;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.client.Put;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.log.HBaseMarkers;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.master.SplitLogManager;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.regionserver.LastSequenceId;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.wal.WAL.Entry;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.wal.WAL.Reader;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.wal.WALProvider.Writer;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.slf4j.Logger;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.slf4j.LoggerFactory;<a name="line.90"></a>
+<span class="sourceLineNo">091</span><a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.MapUtils;<a name="line.97"></a>
 <span class="sourceLineNo">098</span><a name="line.98"></a>
 <span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry;<a name="line.99"></a>
 <span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType;<a name="line.100"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.LogRecoveredEditsOutputSink.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.LogRecoveredEditsOutputSink.html b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.LogRecoveredEditsOutputSink.html
index 9f6a1bd..99f53c4 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.LogRecoveredEditsOutputSink.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.LogRecoveredEditsOutputSink.html
@@ -54,55 +54,55 @@
 <span class="sourceLineNo">046</span>import java.util.concurrent.atomic.AtomicReference;<a name="line.46"></a>
 <span class="sourceLineNo">047</span>import java.util.regex.Matcher;<a name="line.47"></a>
 <span class="sourceLineNo">048</span>import java.util.regex.Pattern;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.commons.collections.CollectionUtils;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.commons.collections.MapUtils;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.commons.lang3.ArrayUtils;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.conf.Configuration;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.FileAlreadyExistsException;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.FileStatus;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.FileSystem;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.fs.Path;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.fs.PathFilter;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.Cell;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.HConstants;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.TableName;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.client.Put;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.log.HBaseMarkers;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.master.SplitLogManager;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.regionserver.LastSequenceId;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.wal.WAL.Entry;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.wal.WAL.Reader;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.wal.WALProvider.Writer;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.slf4j.Logger;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.slf4j.LoggerFactory;<a name="line.92"></a>
-<span class="sourceLineNo">093</span><a name="line.93"></a>
-<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.97"></a>
+<span class="sourceLineNo">049</span>import org.apache.commons.lang3.ArrayUtils;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.conf.Configuration;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.fs.FileAlreadyExistsException;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.fs.FileStatus;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.FileSystem;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.Path;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.PathFilter;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.Cell;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.HConstants;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.TableName;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.client.Put;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.log.HBaseMarkers;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.master.SplitLogManager;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.regionserver.LastSequenceId;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.wal.WAL.Entry;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.wal.WAL.Reader;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.wal.WALProvider.Writer;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.slf4j.Logger;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.slf4j.LoggerFactory;<a name="line.90"></a>
+<span class="sourceLineNo">091</span><a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.MapUtils;<a name="line.97"></a>
 <span class="sourceLineNo">098</span><a name="line.98"></a>
 <span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry;<a name="line.99"></a>
 <span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType;<a name="line.100"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.MutationReplay.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.MutationReplay.html b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.MutationReplay.html
index 9f6a1bd..99f53c4 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.MutationReplay.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.MutationReplay.html
@@ -54,55 +54,55 @@
 <span class="sourceLineNo">046</span>import java.util.concurrent.atomic.AtomicReference;<a name="line.46"></a>
 <span class="sourceLineNo">047</span>import java.util.regex.Matcher;<a name="line.47"></a>
 <span class="sourceLineNo">048</span>import java.util.regex.Pattern;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.commons.collections.CollectionUtils;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.commons.collections.MapUtils;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.commons.lang3.ArrayUtils;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.conf.Configuration;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.FileAlreadyExistsException;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.FileStatus;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.FileSystem;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.fs.Path;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.fs.PathFilter;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.Cell;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.HConstants;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.TableName;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.client.Put;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.log.HBaseMarkers;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.master.SplitLogManager;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.regionserver.LastSequenceId;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.wal.WAL.Entry;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.wal.WAL.Reader;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.wal.WALProvider.Writer;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.slf4j.Logger;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.slf4j.LoggerFactory;<a name="line.92"></a>
-<span class="sourceLineNo">093</span><a name="line.93"></a>
-<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.97"></a>
+<span class="sourceLineNo">049</span>import org.apache.commons.lang3.ArrayUtils;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.conf.Configuration;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.fs.FileAlreadyExistsException;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.fs.FileStatus;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.FileSystem;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.Path;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.PathFilter;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.Cell;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.HConstants;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.TableName;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.client.Put;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.log.HBaseMarkers;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.master.SplitLogManager;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.regionserver.LastSequenceId;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.wal.WAL.Entry;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.wal.WAL.Reader;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.wal.WALProvider.Writer;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.slf4j.Logger;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.slf4j.LoggerFactory;<a name="line.90"></a>
+<span class="sourceLineNo">091</span><a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.MapUtils;<a name="line.97"></a>
 <span class="sourceLineNo">098</span><a name="line.98"></a>
 <span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry;<a name="line.99"></a>
 <span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType;<a name="line.100"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.OutputSink.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.OutputSink.html b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.OutputSink.html
index 9f6a1bd..99f53c4 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.OutputSink.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.OutputSink.html
@@ -54,55 +54,55 @@
 <span class="sourceLineNo">046</span>import java.util.concurrent.atomic.AtomicReference;<a name="line.46"></a>
 <span class="sourceLineNo">047</span>import java.util.regex.Matcher;<a name="line.47"></a>
 <span class="sourceLineNo">048</span>import java.util.regex.Pattern;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.commons.collections.CollectionUtils;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.commons.collections.MapUtils;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.commons.lang3.ArrayUtils;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.conf.Configuration;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.FileAlreadyExistsException;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.FileStatus;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.FileSystem;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.fs.Path;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.fs.PathFilter;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.Cell;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.HConstants;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.TableName;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.client.Put;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.log.HBaseMarkers;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.master.SplitLogManager;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.regionserver.LastSequenceId;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.wal.WAL.Entry;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.wal.WAL.Reader;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.wal.WALProvider.Writer;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.slf4j.Logger;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.slf4j.LoggerFactory;<a name="line.92"></a>
-<span class="sourceLineNo">093</span><a name="line.93"></a>
-<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.97"></a>
+<span class="sourceLineNo">049</span>import org.apache.commons.lang3.ArrayUtils;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.conf.Configuration;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.fs.FileAlreadyExistsException;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.fs.FileStatus;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.FileSystem;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.Path;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.PathFilter;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.Cell;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.HConstants;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.TableName;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.client.Put;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.log.HBaseMarkers;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.master.SplitLogManager;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.regionserver.LastSequenceId;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.wal.WAL.Entry;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.wal.WAL.Reader;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.wal.WALProvider.Writer;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.slf4j.Logger;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.slf4j.LoggerFactory;<a name="line.90"></a>
+<span class="sourceLineNo">091</span><a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.MapUtils;<a name="line.97"></a>
 <span class="sourceLineNo">098</span><a name="line.98"></a>
 <span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry;<a name="line.99"></a>
 <span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType;<a name="line.100"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.PipelineController.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.PipelineController.html b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.PipelineController.html
index 9f6a1bd..99f53c4 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.PipelineController.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.PipelineController.html
@@ -54,55 +54,55 @@
 <span class="sourceLineNo">046</span>import java.util.concurrent.atomic.AtomicReference;<a name="line.46"></a>
 <span class="sourceLineNo">047</span>import java.util.regex.Matcher;<a name="line.47"></a>
 <span class="sourceLineNo">048</span>import java.util.regex.Pattern;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.commons.collections.CollectionUtils;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.commons.collections.MapUtils;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.commons.lang3.ArrayUtils;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.conf.Configuration;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.FileAlreadyExistsException;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.FileStatus;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.FileSystem;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.fs.Path;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.fs.PathFilter;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.Cell;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.HConstants;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.TableName;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.client.Put;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.log.HBaseMarkers;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.master.SplitLogManager;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.regionserver.LastSequenceId;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.wal.WAL.Entry;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.wal.WAL.Reader;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.wal.WALProvider.Writer;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.slf4j.Logger;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.slf4j.LoggerFactory;<a name="line.92"></a>
-<span class="sourceLineNo">093</span><a name="line.93"></a>
-<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.97"></a>
+<span class="sourceLineNo">049</span>import org.apache.commons.lang3.ArrayUtils;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.conf.Configuration;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.fs.FileAlreadyExistsException;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.fs.FileStatus;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.FileSystem;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.Path;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.PathFilter;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.Cell;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.HConstants;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.TableName;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.client.Put;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.log.HBaseMarkers;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.master.SplitLogManager;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.regionserver.LastSequenceId;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.wal.WAL.Entry;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.wal.WAL.Reader;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.wal.WALProvider.Writer;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.slf4j.Logger;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.slf4j.LoggerFactory;<a name="line.90"></a>
+<span class="sourceLineNo">091</span><a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.MapUtils;<a name="line.97"></a>
 <span class="sourceLineNo">098</span><a name="line.98"></a>
 <span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry;<a name="line.99"></a>
 <span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType;<a name="line.100"></a>
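The hunk above is a pure import migration: the unshaded Apache Commons Collections 3 classes (org.apache.commons.collections.CollectionUtils and MapUtils) are dropped in favor of the collections4 copies relocated under HBase's org.apache.hbase.thirdparty namespace, and the surviving imports shift up two line numbers. Below is a minimal sketch of code written against the relocated classes; only the import coordinates are taken from the diff, while the helper class and method names are illustrative:

  import java.util.List;
  import java.util.Map;

  import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;
  import org.apache.hbase.thirdparty.org.apache.commons.collections4.MapUtils;

  public class ShadedCollectionsSketch {
    // Null-safe emptiness check, same semantics as the collections3 version.
    static boolean hasWork(List<String> walEntries) {
      return !CollectionUtils.isEmpty(walEntries);
    }

    // Null-safe map lookup; call sites are untouched by the package move.
    static Object param(Map<String, Object> params, String key) {
      return MapUtils.getObject(params, key);
    }
  }

Because the relocated classes keep the collections4 API, swapping the import line is the whole migration, which is why nothing below the import block changes in this hunk.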

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.RegionEntryBuffer.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.RegionEntryBuffer.html b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.RegionEntryBuffer.html
index 9f6a1bd..99f53c4 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.RegionEntryBuffer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.RegionEntryBuffer.html
@@ -54,55 +54,55 @@
 <span class="sourceLineNo">046</span>import java.util.concurrent.atomic.AtomicReference;<a name="line.46"></a>
 <span class="sourceLineNo">047</span>import java.util.regex.Matcher;<a name="line.47"></a>
 <span class="sourceLineNo">048</span>import java.util.regex.Pattern;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.commons.collections.CollectionUtils;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.commons.collections.MapUtils;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.apache.commons.lang3.ArrayUtils;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.apache.hadoop.conf.Configuration;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.FileAlreadyExistsException;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.FileStatus;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.FileSystem;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.apache.hadoop.fs.Path;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.apache.hadoop.fs.PathFilter;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.Cell;<a name="line.58"></a>
-<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.60"></a>
-<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.HConstants;<a name="line.62"></a>
-<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.TableName;<a name="line.63"></a>
-<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.64"></a>
-<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.65"></a>
-<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.client.Put;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;<a name="line.68"></a>
-<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.69"></a>
-<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.log.HBaseMarkers;<a name="line.70"></a>
-<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.master.SplitLogManager;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.72"></a>
-<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.73"></a>
-<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.74"></a>
-<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.regionserver.LastSequenceId;<a name="line.75"></a>
-<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;<a name="line.77"></a>
-<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.78"></a>
-<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.79"></a>
-<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.80"></a>
-<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.81"></a>
-<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.82"></a>
-<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.wal.WAL.Entry;<a name="line.84"></a>
-<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.wal.WAL.Reader;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>import org.apache.hadoop.hbase.wal.WALProvider.Writer;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;<a name="line.87"></a>
-<span class="sourceLineNo">088</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.88"></a>
-<span class="sourceLineNo">089</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.89"></a>
-<span class="sourceLineNo">090</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.90"></a>
-<span class="sourceLineNo">091</span>import org.slf4j.Logger;<a name="line.91"></a>
-<span class="sourceLineNo">092</span>import org.slf4j.LoggerFactory;<a name="line.92"></a>
-<span class="sourceLineNo">093</span><a name="line.93"></a>
-<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.94"></a>
-<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.95"></a>
-<span class="sourceLineNo">096</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.96"></a>
-<span class="sourceLineNo">097</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.97"></a>
+<span class="sourceLineNo">049</span>import org.apache.commons.lang3.ArrayUtils;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.conf.Configuration;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.fs.FileAlreadyExistsException;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.apache.hadoop.fs.FileStatus;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.apache.hadoop.fs.FileSystem;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.apache.hadoop.fs.Path;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.apache.hadoop.fs.PathFilter;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.apache.hadoop.hbase.Cell;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.apache.hadoop.hbase.CellScanner;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.apache.hadoop.hbase.CellUtil;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>import org.apache.hadoop.hbase.HConstants;<a name="line.60"></a>
+<span class="sourceLineNo">061</span>import org.apache.hadoop.hbase.TableName;<a name="line.61"></a>
+<span class="sourceLineNo">062</span>import org.apache.hadoop.hbase.client.Delete;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>import org.apache.hadoop.hbase.client.Durability;<a name="line.63"></a>
+<span class="sourceLineNo">064</span>import org.apache.hadoop.hbase.client.Mutation;<a name="line.64"></a>
+<span class="sourceLineNo">065</span>import org.apache.hadoop.hbase.client.Put;<a name="line.65"></a>
+<span class="sourceLineNo">066</span>import org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;<a name="line.66"></a>
+<span class="sourceLineNo">067</span>import org.apache.hadoop.hbase.io.HeapSize;<a name="line.67"></a>
+<span class="sourceLineNo">068</span>import org.apache.hadoop.hbase.log.HBaseMarkers;<a name="line.68"></a>
+<span class="sourceLineNo">069</span>import org.apache.hadoop.hbase.master.SplitLogManager;<a name="line.69"></a>
+<span class="sourceLineNo">070</span>import org.apache.hadoop.hbase.monitoring.MonitoredTask;<a name="line.70"></a>
+<span class="sourceLineNo">071</span>import org.apache.hadoop.hbase.monitoring.TaskMonitor;<a name="line.71"></a>
+<span class="sourceLineNo">072</span>import org.apache.hadoop.hbase.regionserver.HRegion;<a name="line.72"></a>
+<span class="sourceLineNo">073</span>import org.apache.hadoop.hbase.regionserver.LastSequenceId;<a name="line.73"></a>
+<span class="sourceLineNo">074</span>import org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;<a name="line.75"></a>
+<span class="sourceLineNo">076</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.76"></a>
+<span class="sourceLineNo">077</span>import org.apache.hadoop.hbase.util.CancelableProgressable;<a name="line.77"></a>
+<span class="sourceLineNo">078</span>import org.apache.hadoop.hbase.util.ClassSize;<a name="line.78"></a>
+<span class="sourceLineNo">079</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.79"></a>
+<span class="sourceLineNo">080</span>import org.apache.hadoop.hbase.util.Pair;<a name="line.80"></a>
+<span class="sourceLineNo">081</span>import org.apache.hadoop.hbase.util.Threads;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>import org.apache.hadoop.hbase.wal.WAL.Entry;<a name="line.82"></a>
+<span class="sourceLineNo">083</span>import org.apache.hadoop.hbase.wal.WAL.Reader;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>import org.apache.hadoop.hbase.wal.WALProvider.Writer;<a name="line.84"></a>
+<span class="sourceLineNo">085</span>import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;<a name="line.85"></a>
+<span class="sourceLineNo">086</span>import org.apache.hadoop.io.MultipleIOException;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>import org.apache.hadoop.ipc.RemoteException;<a name="line.87"></a>
+<span class="sourceLineNo">088</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.88"></a>
+<span class="sourceLineNo">089</span>import org.slf4j.Logger;<a name="line.89"></a>
+<span class="sourceLineNo">090</span>import org.slf4j.LoggerFactory;<a name="line.90"></a>
+<span class="sourceLineNo">091</span><a name="line.91"></a>
+<span class="sourceLineNo">092</span>import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a name="line.93"></a>
+<span class="sourceLineNo">094</span>import org.apache.hbase.thirdparty.com.google.common.collect.Lists;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;<a name="line.95"></a>
+<span class="sourceLineNo">096</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.96"></a>
+<span class="sourceLineNo">097</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.MapUtils;<a name="line.97"></a>
 <span class="sourceLineNo">098</span><a name="line.98"></a>
 <span class="sourceLineNo">099</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry;<a name="line.99"></a>
 <span class="sourceLineNo">100</span>import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType;<a name="line.100"></a>
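This hunk is byte-for-byte the same as the previous one: WALSplitter.PipelineController and WALSplitter.RegionEntryBuffer are nested classes of the same WALSplitter.java, so the site generator re-renders the full source on each page. The point of relocating the bundled library under org.apache.hbase.thirdparty is to keep it from clashing with whatever commons-collections version an application ships. A hedged illustration follows, assuming both the plain and the shaded collections4 artifacts are on the classpath; the demo class itself is hypothetical:

  import java.util.Collections;

  public class RelocationDemo {
    public static void main(String[] args) {
      // The application's own commons-collections4 dependency...
      boolean app = org.apache.commons.collections4.CollectionUtils
          .isEmpty(Collections.emptyList());
      // ...and HBase's relocated copy resolve to distinct classes, so the
      // two versions can never conflict at runtime.
      boolean hbase = org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils
          .isEmpty(Collections.emptyList());
      System.out.println(app && hbase); // prints "true"
    }
  }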


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html b/devapidocs/src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html
index 0ef4c76..1c79f3b 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html
@@ -32,123 +32,124 @@
 <span class="sourceLineNo">024</span>import java.util.List;<a name="line.24"></a>
 <span class="sourceLineNo">025</span>import java.util.Map;<a name="line.25"></a>
 <span class="sourceLineNo">026</span><a name="line.26"></a>
-<span class="sourceLineNo">027</span>import org.apache.commons.collections.MapUtils;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import org.apache.hadoop.conf.Configuration;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.hadoop.fs.FileStatus;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.HBaseInterfaceAudience;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.TableNotFoundException;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.backup.BackupRestoreConstants;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.backup.impl.BackupManager;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.backup.impl.BackupSystemTable;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.client.Connection;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.client.ConnectionFactory;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.master.HMaster;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.master.MasterServices;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.apache.hadoop.hbase.master.cleaner.BaseLogCleanerDelegate;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.40"></a>
-<span class="sourceLineNo">041</span><a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.slf4j.Logger;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.slf4j.LoggerFactory;<a name="line.43"></a>
-<span class="sourceLineNo">044</span><a name="line.44"></a>
-<span class="sourceLineNo">045</span>/**<a name="line.45"></a>
-<span class="sourceLineNo">046</span> * Implementation of a log cleaner that checks if a log is still scheduled for incremental backup<a name="line.46"></a>
-<span class="sourceLineNo">047</span> * before deleting it when its TTL is over.<a name="line.47"></a>
-<span class="sourceLineNo">048</span> */<a name="line.48"></a>
-<span class="sourceLineNo">049</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)<a name="line.49"></a>
-<span class="sourceLineNo">050</span>public class BackupLogCleaner extends BaseLogCleanerDelegate {<a name="line.50"></a>
-<span class="sourceLineNo">051</span>  private static final Logger LOG = LoggerFactory.getLogger(BackupLogCleaner.class);<a name="line.51"></a>
-<span class="sourceLineNo">052</span><a name="line.52"></a>
-<span class="sourceLineNo">053</span>  private boolean stopped = false;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>  private Connection conn;<a name="line.54"></a>
-<span class="sourceLineNo">055</span><a name="line.55"></a>
-<span class="sourceLineNo">056</span>  public BackupLogCleaner() {<a name="line.56"></a>
-<span class="sourceLineNo">057</span>  }<a name="line.57"></a>
-<span class="sourceLineNo">058</span><a name="line.58"></a>
-<span class="sourceLineNo">059</span>  @Override<a name="line.59"></a>
-<span class="sourceLineNo">060</span>  public void init(Map&lt;String, Object&gt; params) {<a name="line.60"></a>
-<span class="sourceLineNo">061</span>    MasterServices master = (MasterServices) MapUtils.getObject(params,<a name="line.61"></a>
-<span class="sourceLineNo">062</span>      HMaster.MASTER);<a name="line.62"></a>
-<span class="sourceLineNo">063</span>    if (master != null) {<a name="line.63"></a>
-<span class="sourceLineNo">064</span>      conn = master.getConnection();<a name="line.64"></a>
-<span class="sourceLineNo">065</span>      if (getConf() == null) {<a name="line.65"></a>
-<span class="sourceLineNo">066</span>        super.setConf(conn.getConfiguration());<a name="line.66"></a>
-<span class="sourceLineNo">067</span>      }<a name="line.67"></a>
-<span class="sourceLineNo">068</span>    }<a name="line.68"></a>
-<span class="sourceLineNo">069</span>    if (conn == null) {<a name="line.69"></a>
-<span class="sourceLineNo">070</span>      try {<a name="line.70"></a>
-<span class="sourceLineNo">071</span>        conn = ConnectionFactory.createConnection(getConf());<a name="line.71"></a>
-<span class="sourceLineNo">072</span>      } catch (IOException ioe) {<a name="line.72"></a>
-<span class="sourceLineNo">073</span>        throw new RuntimeException("Failed to create connection", ioe);<a name="line.73"></a>
-<span class="sourceLineNo">074</span>      }<a name="line.74"></a>
-<span class="sourceLineNo">075</span>    }<a name="line.75"></a>
-<span class="sourceLineNo">076</span>  }<a name="line.76"></a>
-<span class="sourceLineNo">077</span><a name="line.77"></a>
-<span class="sourceLineNo">078</span>  @Override<a name="line.78"></a>
-<span class="sourceLineNo">079</span>  public Iterable&lt;FileStatus&gt; getDeletableFiles(Iterable&lt;FileStatus&gt; files) {<a name="line.79"></a>
-<span class="sourceLineNo">080</span>    // all members of this class are null if backup is disabled,<a name="line.80"></a>
-<span class="sourceLineNo">081</span>    // so we cannot filter the files<a name="line.81"></a>
-<span class="sourceLineNo">082</span>    if (this.getConf() == null || !BackupManager.isBackupEnabled(getConf())) {<a name="line.82"></a>
-<span class="sourceLineNo">083</span>      LOG.debug("Backup is not enabled. Check your {} setting",<a name="line.83"></a>
-<span class="sourceLineNo">084</span>          BackupRestoreConstants.BACKUP_ENABLE_KEY);<a name="line.84"></a>
-<span class="sourceLineNo">085</span>      return files;<a name="line.85"></a>
-<span class="sourceLineNo">086</span>    }<a name="line.86"></a>
-<span class="sourceLineNo">087</span><a name="line.87"></a>
-<span class="sourceLineNo">088</span>    try (final BackupSystemTable table = new BackupSystemTable(conn)) {<a name="line.88"></a>
-<span class="sourceLineNo">089</span>      // If we do not have recorded backup sessions<a name="line.89"></a>
-<span class="sourceLineNo">090</span>      try {<a name="line.90"></a>
-<span class="sourceLineNo">091</span>        if (!table.hasBackupSessions()) {<a name="line.91"></a>
-<span class="sourceLineNo">092</span>          LOG.trace("BackupLogCleaner has no backup sessions");<a name="line.92"></a>
-<span class="sourceLineNo">093</span>          return files;<a name="line.93"></a>
-<span class="sourceLineNo">094</span>        }<a name="line.94"></a>
-<span class="sourceLineNo">095</span>      } catch (TableNotFoundException tnfe) {<a name="line.95"></a>
-<span class="sourceLineNo">096</span>        LOG.warn("Backup system table is not available: {}", tnfe.getMessage());<a name="line.96"></a>
-<span class="sourceLineNo">097</span>        return files;<a name="line.97"></a>
-<span class="sourceLineNo">098</span>      }<a name="line.98"></a>
-<span class="sourceLineNo">099</span><a name="line.99"></a>
-<span class="sourceLineNo">100</span>      List&lt;FileStatus&gt; list = new ArrayList&lt;&gt;();<a name="line.100"></a>
-<span class="sourceLineNo">101</span>      Map&lt;FileStatus, Boolean&gt; walFilesDeletableMap = table.areWALFilesDeletable(files);<a name="line.101"></a>
-<span class="sourceLineNo">102</span>      for (Map.Entry&lt;FileStatus, Boolean&gt; entry: walFilesDeletableMap.entrySet()) {<a name="line.102"></a>
-<span class="sourceLineNo">103</span>        FileStatus file = entry.getKey();<a name="line.103"></a>
-<span class="sourceLineNo">104</span>        String wal = file.getPath().toString();<a name="line.104"></a>
-<span class="sourceLineNo">105</span>        boolean deletable = entry.getValue();<a name="line.105"></a>
-<span class="sourceLineNo">106</span>        if (deletable) {<a name="line.106"></a>
-<span class="sourceLineNo">107</span>          LOG.debug("Found log file in backup system table, deleting: {}", wal);<a name="line.107"></a>
-<span class="sourceLineNo">108</span>          list.add(file);<a name="line.108"></a>
-<span class="sourceLineNo">109</span>        } else {<a name="line.109"></a>
-<span class="sourceLineNo">110</span>          LOG.debug("Did not find this log in backup system table, keeping: {}", wal);<a name="line.110"></a>
-<span class="sourceLineNo">111</span>        }<a name="line.111"></a>
-<span class="sourceLineNo">112</span>      }<a name="line.112"></a>
-<span class="sourceLineNo">113</span>      return list;<a name="line.113"></a>
-<span class="sourceLineNo">114</span>    } catch (IOException e) {<a name="line.114"></a>
-<span class="sourceLineNo">115</span>      LOG.error("Failed to get backup system table, therefore will keep all files", e);<a name="line.115"></a>
-<span class="sourceLineNo">116</span>      // nothing to delete<a name="line.116"></a>
-<span class="sourceLineNo">117</span>      return Collections.emptyList();<a name="line.117"></a>
-<span class="sourceLineNo">118</span>    }<a name="line.118"></a>
-<span class="sourceLineNo">119</span>  }<a name="line.119"></a>
-<span class="sourceLineNo">120</span><a name="line.120"></a>
-<span class="sourceLineNo">121</span>  @Override<a name="line.121"></a>
-<span class="sourceLineNo">122</span>  public void setConf(Configuration config) {<a name="line.122"></a>
-<span class="sourceLineNo">123</span>    // If backup is disabled, keep all members null<a name="line.123"></a>
-<span class="sourceLineNo">124</span>    super.setConf(config);<a name="line.124"></a>
-<span class="sourceLineNo">125</span>    if (!config.getBoolean(BackupRestoreConstants.BACKUP_ENABLE_KEY,<a name="line.125"></a>
-<span class="sourceLineNo">126</span>      BackupRestoreConstants.BACKUP_ENABLE_DEFAULT)) {<a name="line.126"></a>
-<span class="sourceLineNo">127</span>      LOG.warn("Backup is disabled - allowing all wals to be deleted");<a name="line.127"></a>
-<span class="sourceLineNo">128</span>    }<a name="line.128"></a>
-<span class="sourceLineNo">129</span>  }<a name="line.129"></a>
-<span class="sourceLineNo">130</span><a name="line.130"></a>
-<span class="sourceLineNo">131</span>  @Override<a name="line.131"></a>
-<span class="sourceLineNo">132</span>  public void stop(String why) {<a name="line.132"></a>
-<span class="sourceLineNo">133</span>    if (!this.stopped) {<a name="line.133"></a>
-<span class="sourceLineNo">134</span>      this.stopped = true;<a name="line.134"></a>
-<span class="sourceLineNo">135</span>      LOG.info("Stopping BackupLogCleaner");<a name="line.135"></a>
-<span class="sourceLineNo">136</span>    }<a name="line.136"></a>
-<span class="sourceLineNo">137</span>  }<a name="line.137"></a>
-<span class="sourceLineNo">138</span><a name="line.138"></a>
-<span class="sourceLineNo">139</span>  @Override<a name="line.139"></a>
-<span class="sourceLineNo">140</span>  public boolean isStopped() {<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    return this.stopped;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>  }<a name="line.142"></a>
-<span class="sourceLineNo">143</span>}<a name="line.143"></a>
+<span class="sourceLineNo">027</span>import org.apache.hadoop.conf.Configuration;<a name="line.27"></a>
+<span class="sourceLineNo">028</span>import org.apache.hadoop.fs.FileStatus;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.HBaseInterfaceAudience;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.TableNotFoundException;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.backup.BackupRestoreConstants;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.backup.impl.BackupManager;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.backup.impl.BackupSystemTable;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.client.Connection;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.client.ConnectionFactory;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.apache.hadoop.hbase.master.HMaster;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.apache.hadoop.hbase.master.MasterServices;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.hbase.master.cleaner.BaseLogCleanerDelegate;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.39"></a>
+<span class="sourceLineNo">040</span><a name="line.40"></a>
+<span class="sourceLineNo">041</span>import org.slf4j.Logger;<a name="line.41"></a>
+<span class="sourceLineNo">042</span>import org.slf4j.LoggerFactory;<a name="line.42"></a>
+<span class="sourceLineNo">043</span><a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.MapUtils;<a name="line.44"></a>
+<span class="sourceLineNo">045</span><a name="line.45"></a>
+<span class="sourceLineNo">046</span>/**<a name="line.46"></a>
+<span class="sourceLineNo">047</span> * Implementation of a log cleaner that checks if a log is still scheduled for incremental backup<a name="line.47"></a>
+<span class="sourceLineNo">048</span> * before deleting it when its TTL is over.<a name="line.48"></a>
+<span class="sourceLineNo">049</span> */<a name="line.49"></a>
+<span class="sourceLineNo">050</span>@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)<a name="line.50"></a>
+<span class="sourceLineNo">051</span>public class BackupLogCleaner extends BaseLogCleanerDelegate {<a name="line.51"></a>
+<span class="sourceLineNo">052</span>  private static final Logger LOG = LoggerFactory.getLogger(BackupLogCleaner.class);<a name="line.52"></a>
+<span class="sourceLineNo">053</span><a name="line.53"></a>
+<span class="sourceLineNo">054</span>  private boolean stopped = false;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>  private Connection conn;<a name="line.55"></a>
+<span class="sourceLineNo">056</span><a name="line.56"></a>
+<span class="sourceLineNo">057</span>  public BackupLogCleaner() {<a name="line.57"></a>
+<span class="sourceLineNo">058</span>  }<a name="line.58"></a>
+<span class="sourceLineNo">059</span><a name="line.59"></a>
+<span class="sourceLineNo">060</span>  @Override<a name="line.60"></a>
+<span class="sourceLineNo">061</span>  public void init(Map&lt;String, Object&gt; params) {<a name="line.61"></a>
+<span class="sourceLineNo">062</span>    MasterServices master = (MasterServices) MapUtils.getObject(params,<a name="line.62"></a>
+<span class="sourceLineNo">063</span>      HMaster.MASTER);<a name="line.63"></a>
+<span class="sourceLineNo">064</span>    if (master != null) {<a name="line.64"></a>
+<span class="sourceLineNo">065</span>      conn = master.getConnection();<a name="line.65"></a>
+<span class="sourceLineNo">066</span>      if (getConf() == null) {<a name="line.66"></a>
+<span class="sourceLineNo">067</span>        super.setConf(conn.getConfiguration());<a name="line.67"></a>
+<span class="sourceLineNo">068</span>      }<a name="line.68"></a>
+<span class="sourceLineNo">069</span>    }<a name="line.69"></a>
+<span class="sourceLineNo">070</span>    if (conn == null) {<a name="line.70"></a>
+<span class="sourceLineNo">071</span>      try {<a name="line.71"></a>
+<span class="sourceLineNo">072</span>        conn = ConnectionFactory.createConnection(getConf());<a name="line.72"></a>
+<span class="sourceLineNo">073</span>      } catch (IOException ioe) {<a name="line.73"></a>
+<span class="sourceLineNo">074</span>        throw new RuntimeException("Failed to create connection", ioe);<a name="line.74"></a>
+<span class="sourceLineNo">075</span>      }<a name="line.75"></a>
+<span class="sourceLineNo">076</span>    }<a name="line.76"></a>
+<span class="sourceLineNo">077</span>  }<a name="line.77"></a>
+<span class="sourceLineNo">078</span><a name="line.78"></a>
+<span class="sourceLineNo">079</span>  @Override<a name="line.79"></a>
+<span class="sourceLineNo">080</span>  public Iterable&lt;FileStatus&gt; getDeletableFiles(Iterable&lt;FileStatus&gt; files) {<a name="line.80"></a>
+<span class="sourceLineNo">081</span>    // all members of this class are null if backup is disabled,<a name="line.81"></a>
+<span class="sourceLineNo">082</span>    // so we cannot filter the files<a name="line.82"></a>
+<span class="sourceLineNo">083</span>    if (this.getConf() == null || !BackupManager.isBackupEnabled(getConf())) {<a name="line.83"></a>
+<span class="sourceLineNo">084</span>      LOG.debug("Backup is not enabled. Check your {} setting",<a name="line.84"></a>
+<span class="sourceLineNo">085</span>          BackupRestoreConstants.BACKUP_ENABLE_KEY);<a name="line.85"></a>
+<span class="sourceLineNo">086</span>      return files;<a name="line.86"></a>
+<span class="sourceLineNo">087</span>    }<a name="line.87"></a>
+<span class="sourceLineNo">088</span><a name="line.88"></a>
+<span class="sourceLineNo">089</span>    try (final BackupSystemTable table = new BackupSystemTable(conn)) {<a name="line.89"></a>
+<span class="sourceLineNo">090</span>      // If we do not have recorded backup sessions<a name="line.90"></a>
+<span class="sourceLineNo">091</span>      try {<a name="line.91"></a>
+<span class="sourceLineNo">092</span>        if (!table.hasBackupSessions()) {<a name="line.92"></a>
+<span class="sourceLineNo">093</span>          LOG.trace("BackupLogCleaner has no backup sessions");<a name="line.93"></a>
+<span class="sourceLineNo">094</span>          return files;<a name="line.94"></a>
+<span class="sourceLineNo">095</span>        }<a name="line.95"></a>
+<span class="sourceLineNo">096</span>      } catch (TableNotFoundException tnfe) {<a name="line.96"></a>
+<span class="sourceLineNo">097</span>        LOG.warn("Backup system table is not available: {}", tnfe.getMessage());<a name="line.97"></a>
+<span class="sourceLineNo">098</span>        return files;<a name="line.98"></a>
+<span class="sourceLineNo">099</span>      }<a name="line.99"></a>
+<span class="sourceLineNo">100</span><a name="line.100"></a>
+<span class="sourceLineNo">101</span>      List&lt;FileStatus&gt; list = new ArrayList&lt;&gt;();<a name="line.101"></a>
+<span class="sourceLineNo">102</span>      Map&lt;FileStatus, Boolean&gt; walFilesDeletableMap = table.areWALFilesDeletable(files);<a name="line.102"></a>
+<span class="sourceLineNo">103</span>      for (Map.Entry&lt;FileStatus, Boolean&gt; entry: walFilesDeletableMap.entrySet()) {<a name="line.103"></a>
+<span class="sourceLineNo">104</span>        FileStatus file = entry.getKey();<a name="line.104"></a>
+<span class="sourceLineNo">105</span>        String wal = file.getPath().toString();<a name="line.105"></a>
+<span class="sourceLineNo">106</span>        boolean deletable = entry.getValue();<a name="line.106"></a>
+<span class="sourceLineNo">107</span>        if (deletable) {<a name="line.107"></a>
+<span class="sourceLineNo">108</span>          LOG.debug("Found log file in backup system table, deleting: {}", wal);<a name="line.108"></a>
+<span class="sourceLineNo">109</span>          list.add(file);<a name="line.109"></a>
+<span class="sourceLineNo">110</span>        } else {<a name="line.110"></a>
+<span class="sourceLineNo">111</span>          LOG.debug("Did not find this log in backup system table, keeping: {}", wal);<a name="line.111"></a>
+<span class="sourceLineNo">112</span>        }<a name="line.112"></a>
+<span class="sourceLineNo">113</span>      }<a name="line.113"></a>
+<span class="sourceLineNo">114</span>      return list;<a name="line.114"></a>
+<span class="sourceLineNo">115</span>    } catch (IOException e) {<a name="line.115"></a>
+<span class="sourceLineNo">116</span>      LOG.error("Failed to get backup system table, therefore will keep all files", e);<a name="line.116"></a>
+<span class="sourceLineNo">117</span>      // nothing to delete<a name="line.117"></a>
+<span class="sourceLineNo">118</span>      return Collections.emptyList();<a name="line.118"></a>
+<span class="sourceLineNo">119</span>    }<a name="line.119"></a>
+<span class="sourceLineNo">120</span>  }<a name="line.120"></a>
+<span class="sourceLineNo">121</span><a name="line.121"></a>
+<span class="sourceLineNo">122</span>  @Override<a name="line.122"></a>
+<span class="sourceLineNo">123</span>  public void setConf(Configuration config) {<a name="line.123"></a>
+<span class="sourceLineNo">124</span>    // If backup is disabled, keep all members null<a name="line.124"></a>
+<span class="sourceLineNo">125</span>    super.setConf(config);<a name="line.125"></a>
+<span class="sourceLineNo">126</span>    if (!config.getBoolean(BackupRestoreConstants.BACKUP_ENABLE_KEY,<a name="line.126"></a>
+<span class="sourceLineNo">127</span>      BackupRestoreConstants.BACKUP_ENABLE_DEFAULT)) {<a name="line.127"></a>
+<span class="sourceLineNo">128</span>      LOG.warn("Backup is disabled - allowing all wals to be deleted");<a name="line.128"></a>
+<span class="sourceLineNo">129</span>    }<a name="line.129"></a>
+<span class="sourceLineNo">130</span>  }<a name="line.130"></a>
+<span class="sourceLineNo">131</span><a name="line.131"></a>
+<span class="sourceLineNo">132</span>  @Override<a name="line.132"></a>
+<span class="sourceLineNo">133</span>  public void stop(String why) {<a name="line.133"></a>
+<span class="sourceLineNo">134</span>    if (!this.stopped) {<a name="line.134"></a>
+<span class="sourceLineNo">135</span>      this.stopped = true;<a name="line.135"></a>
+<span class="sourceLineNo">136</span>      LOG.info("Stopping BackupLogCleaner");<a name="line.136"></a>
+<span class="sourceLineNo">137</span>    }<a name="line.137"></a>
+<span class="sourceLineNo">138</span>  }<a name="line.138"></a>
+<span class="sourceLineNo">139</span><a name="line.139"></a>
+<span class="sourceLineNo">140</span>  @Override<a name="line.140"></a>
+<span class="sourceLineNo">141</span>  public boolean isStopped() {<a name="line.141"></a>
+<span class="sourceLineNo">142</span>    return this.stopped;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>  }<a name="line.143"></a>
+<span class="sourceLineNo">144</span>}<a name="line.144"></a>
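In BackupLogCleaner the same swap applies to MapUtils, and the shaded import moves into its own block after the slf4j imports, pushing every later source line down by one (the file grows from 143 to 144 lines). Since the class is a BaseLogCleanerDelegate, it only runs once it is wired into the master's cleaner chain. A minimal sketch of that wiring, assuming the standard configuration keys (hbase.master.logcleaner.plugins from HConstants, hbase.backup.enable from BackupRestoreConstants); the snippet is illustrative and not part of this commit:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.hbase.backup.master.BackupLogCleaner;

  public class BackupCleanerWiring {
    public static void main(String[] args) {
      Configuration conf = HBaseConfiguration.create();
      // Append the backup-aware delegate to the master's WAL cleaner chain.
      String plugins = conf.get("hbase.master.logcleaner.plugins", "");
      conf.set("hbase.master.logcleaner.plugins",
          plugins.isEmpty() ? BackupLogCleaner.class.getName()
              : plugins + "," + BackupLogCleaner.class.getName());
      // Without this flag getDeletableFiles() short-circuits and passes
      // every WAL file through, as the source above shows.
      conf.setBoolean("hbase.backup.enable", true);
      System.out.println(conf.get("hbase.master.logcleaner.plugins"));
    }
  }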
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/client/RowMutations.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/RowMutations.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/RowMutations.html
index 76c6859..df06090 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/RowMutations.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/RowMutations.html
@@ -31,164 +31,165 @@
 <span class="sourceLineNo">023</span>import java.util.Collections;<a name="line.23"></a>
 <span class="sourceLineNo">024</span>import java.util.List;<a name="line.24"></a>
 <span class="sourceLineNo">025</span><a name="line.25"></a>
-<span class="sourceLineNo">026</span>import org.apache.commons.collections.CollectionUtils;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.28"></a>
-<span class="sourceLineNo">029</span><a name="line.29"></a>
-<span class="sourceLineNo">030</span>/**<a name="line.30"></a>
-<span class="sourceLineNo">031</span> * Performs multiple mutations atomically on a single row.<a name="line.31"></a>
-<span class="sourceLineNo">032</span> * Currently {@link Put} and {@link Delete} are supported.<a name="line.32"></a>
-<span class="sourceLineNo">033</span> *<a name="line.33"></a>
-<span class="sourceLineNo">034</span> * The mutations are performed in the order in which they<a name="line.34"></a>
-<span class="sourceLineNo">035</span> * were added.<a name="line.35"></a>
-<span class="sourceLineNo">036</span> *<a name="line.36"></a>
-<span class="sourceLineNo">037</span> * &lt;p&gt;We compare and equate mutations based off their row so be careful putting RowMutations<a name="line.37"></a>
-<span class="sourceLineNo">038</span> * into Sets or using them as keys in Maps.<a name="line.38"></a>
-<span class="sourceLineNo">039</span> */<a name="line.39"></a>
-<span class="sourceLineNo">040</span>@InterfaceAudience.Public<a name="line.40"></a>
-<span class="sourceLineNo">041</span>public class RowMutations implements Row {<a name="line.41"></a>
-<span class="sourceLineNo">042</span><a name="line.42"></a>
-<span class="sourceLineNo">043</span>  /**<a name="line.43"></a>
-<span class="sourceLineNo">044</span>   * Create a {@link RowMutations} with the specified mutations.<a name="line.44"></a>
-<span class="sourceLineNo">045</span>   * @param mutations the mutations to send<a name="line.45"></a>
-<span class="sourceLineNo">046</span>   * @return RowMutations<a name="line.46"></a>
-<span class="sourceLineNo">047</span>   * @throws IOException if any row in mutations is different to another<a name="line.47"></a>
-<span class="sourceLineNo">048</span>   */<a name="line.48"></a>
-<span class="sourceLineNo">049</span>  public static RowMutations of(List&lt;? extends Mutation&gt; mutations) throws IOException {<a name="line.49"></a>
-<span class="sourceLineNo">050</span>    if (CollectionUtils.isEmpty(mutations)) {<a name="line.50"></a>
-<span class="sourceLineNo">051</span>      throw new IllegalArgumentException("Cannot instantiate a RowMutations by empty list");<a name="line.51"></a>
-<span class="sourceLineNo">052</span>    }<a name="line.52"></a>
-<span class="sourceLineNo">053</span>    return new RowMutations(mutations.get(0).getRow(), mutations.size())<a name="line.53"></a>
-<span class="sourceLineNo">054</span>        .add(mutations);<a name="line.54"></a>
-<span class="sourceLineNo">055</span>  }<a name="line.55"></a>
-<span class="sourceLineNo">056</span><a name="line.56"></a>
-<span class="sourceLineNo">057</span>  private final List&lt;Mutation&gt; mutations;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>  private final byte [] row;<a name="line.58"></a>
-<span class="sourceLineNo">059</span><a name="line.59"></a>
-<span class="sourceLineNo">060</span>  public RowMutations(byte [] row) {<a name="line.60"></a>
-<span class="sourceLineNo">061</span>    this(row, -1);<a name="line.61"></a>
-<span class="sourceLineNo">062</span>  }<a name="line.62"></a>
-<span class="sourceLineNo">063</span>  /**<a name="line.63"></a>
-<span class="sourceLineNo">064</span>   * Create an atomic mutation for the specified row.<a name="line.64"></a>
-<span class="sourceLineNo">065</span>   * @param row row key<a name="line.65"></a>
-<span class="sourceLineNo">066</span>   * @param initialCapacity the initial capacity of the RowMutations<a name="line.66"></a>
-<span class="sourceLineNo">067</span>   */<a name="line.67"></a>
-<span class="sourceLineNo">068</span>  public RowMutations(byte [] row, int initialCapacity) {<a name="line.68"></a>
-<span class="sourceLineNo">069</span>    this.row = Bytes.copy(Mutation.checkRow(row));<a name="line.69"></a>
-<span class="sourceLineNo">070</span>    if (initialCapacity &lt;= 0) {<a name="line.70"></a>
-<span class="sourceLineNo">071</span>      this.mutations = new ArrayList&lt;&gt;();<a name="line.71"></a>
-<span class="sourceLineNo">072</span>    } else {<a name="line.72"></a>
-<span class="sourceLineNo">073</span>      this.mutations = new ArrayList&lt;&gt;(initialCapacity);<a name="line.73"></a>
-<span class="sourceLineNo">074</span>    }<a name="line.74"></a>
-<span class="sourceLineNo">075</span>  }<a name="line.75"></a>
-<span class="sourceLineNo">076</span><a name="line.76"></a>
-<span class="sourceLineNo">077</span>  /**<a name="line.77"></a>
-<span class="sourceLineNo">078</span>   * Add a {@link Put} operation to the list of mutations<a name="line.78"></a>
-<span class="sourceLineNo">079</span>   * @param p The {@link Put} to add<a name="line.79"></a>
-<span class="sourceLineNo">080</span>   * @throws IOException if the row of added mutation doesn't match the original row<a name="line.80"></a>
-<span class="sourceLineNo">081</span>   * @deprecated since 2.0 version and will be removed in 3.0 version.<a name="line.81"></a>
-<span class="sourceLineNo">082</span>   *             use {@link #add(Mutation)}<a name="line.82"></a>
-<span class="sourceLineNo">083</span>   */<a name="line.83"></a>
-<span class="sourceLineNo">084</span>  @Deprecated<a name="line.84"></a>
-<span class="sourceLineNo">085</span>  public void add(Put p) throws IOException {<a name="line.85"></a>
-<span class="sourceLineNo">086</span>    add((Mutation) p);<a name="line.86"></a>
-<span class="sourceLineNo">087</span>  }<a name="line.87"></a>
-<span class="sourceLineNo">088</span><a name="line.88"></a>
-<span class="sourceLineNo">089</span>  /**<a name="line.89"></a>
-<span class="sourceLineNo">090</span>   * Add a {@link Delete} operation to the list of mutations<a name="line.90"></a>
-<span class="sourceLineNo">091</span>   * @param d The {@link Delete} to add<a name="line.91"></a>
-<span class="sourceLineNo">092</span>   * @throws IOException if the row of added mutation doesn't match the original row<a name="line.92"></a>
-<span class="sourceLineNo">093</span>   * @deprecated since 2.0 version and will be removed in 3.0 version.<a name="line.93"></a>
-<span class="sourceLineNo">094</span>   *             use {@link #add(Mutation)}<a name="line.94"></a>
-<span class="sourceLineNo">095</span>   */<a name="line.95"></a>
-<span class="sourceLineNo">096</span>  @Deprecated<a name="line.96"></a>
-<span class="sourceLineNo">097</span>  public void add(Delete d) throws IOException {<a name="line.97"></a>
-<span class="sourceLineNo">098</span>    add((Mutation) d);<a name="line.98"></a>
-<span class="sourceLineNo">099</span>  }<a name="line.99"></a>
-<span class="sourceLineNo">100</span><a name="line.100"></a>
-<span class="sourceLineNo">101</span>  /**<a name="line.101"></a>
-<span class="sourceLineNo">102</span>   * Currently only supports {@link Put} and {@link Delete} mutations.<a name="line.102"></a>
-<span class="sourceLineNo">103</span>   *<a name="line.103"></a>
-<span class="sourceLineNo">104</span>   * @param mutation The data to send.<a name="line.104"></a>
-<span class="sourceLineNo">105</span>   * @throws IOException if the row of added mutation doesn't match the original row<a name="line.105"></a>
-<span class="sourceLineNo">106</span>   */<a name="line.106"></a>
-<span class="sourceLineNo">107</span>  public RowMutations add(Mutation mutation) throws IOException {<a name="line.107"></a>
-<span class="sourceLineNo">108</span>    return add(Collections.singletonList(mutation));<a name="line.108"></a>
-<span class="sourceLineNo">109</span>  }<a name="line.109"></a>
-<span class="sourceLineNo">110</span><a name="line.110"></a>
-<span class="sourceLineNo">111</span>  /**<a name="line.111"></a>
-<span class="sourceLineNo">112</span>   * Currently only supports {@link Put} and {@link Delete} mutations.<a name="line.112"></a>
-<span class="sourceLineNo">113</span>   *<a name="line.113"></a>
-<span class="sourceLineNo">114</span>   * @param mutations The data to send.<a name="line.114"></a>
-<span class="sourceLineNo">115</span>   * @throws IOException if the row of added mutation doesn't match the original row<a name="line.115"></a>
-<span class="sourceLineNo">116</span>   */<a name="line.116"></a>
-<span class="sourceLineNo">117</span>  public RowMutations add(List&lt;? extends Mutation&gt; mutations) throws IOException {<a name="line.117"></a>
-<span class="sourceLineNo">118</span>    for (Mutation mutation : mutations) {<a name="line.118"></a>
-<span class="sourceLineNo">119</span>      if (!Bytes.equals(row, mutation.getRow())) {<a name="line.119"></a>
-<span class="sourceLineNo">120</span>        throw new WrongRowIOException("The row in the recently added Put/Delete &lt;" +<a name="line.120"></a>
-<span class="sourceLineNo">121</span>          Bytes.toStringBinary(mutation.getRow()) + "&gt; doesn't match the original one &lt;" +<a name="line.121"></a>
-<span class="sourceLineNo">122</span>          Bytes.toStringBinary(this.row) + "&gt;");<a name="line.122"></a>
-<span class="sourceLineNo">123</span>      }<a name="line.123"></a>
-<span class="sourceLineNo">124</span>    }<a name="line.124"></a>
-<span class="sourceLineNo">125</span>    this.mutations.addAll(mutations);<a name="line.125"></a>
-<span class="sourceLineNo">126</span>    return this;<a name="line.126"></a>
-<span class="sourceLineNo">127</span>  }<a name="line.127"></a>
-<span class="sourceLineNo">128</span><a name="line.128"></a>
-<span class="sourceLineNo">129</span>  /**<a name="line.129"></a>
-<span class="sourceLineNo">130</span>   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.<a name="line.130"></a>
-<span class="sourceLineNo">131</span>   *             Use {@link Row#COMPARATOR} instead<a name="line.131"></a>
-<span class="sourceLineNo">132</span>   */<a name="line.132"></a>
-<span class="sourceLineNo">133</span>  @Deprecated<a name="line.133"></a>
-<span class="sourceLineNo">134</span>  @Override<a name="line.134"></a>
-<span class="sourceLineNo">135</span>  public int compareTo(Row i) {<a name="line.135"></a>
-<span class="sourceLineNo">136</span>    return Bytes.compareTo(this.getRow(), i.getRow());<a name="line.136"></a>
-<span class="sourceLineNo">137</span>  }<a name="line.137"></a>
-<span class="sourceLineNo">138</span><a name="line.138"></a>
-<span class="sourceLineNo">139</span>  /**<a name="line.139"></a>
-<span class="sourceLineNo">140</span>   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.<a name="line.140"></a>
-<span class="sourceLineNo">141</span>   *             No replacement<a name="line.141"></a>
-<span class="sourceLineNo">142</span>   */<a name="line.142"></a>
-<span class="sourceLineNo">143</span>  @Deprecated<a name="line.143"></a>
-<span class="sourceLineNo">144</span>  @Override<a name="line.144"></a>
-<span class="sourceLineNo">145</span>  public boolean equals(Object obj) {<a name="line.145"></a>
-<span class="sourceLineNo">146</span>    if (obj == this) return true;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>    if (obj instanceof RowMutations) {<a name="line.147"></a>
-<span class="sourceLineNo">148</span>      RowMutations other = (RowMutations)obj;<a name="line.148"></a>
-<span class="sourceLineNo">149</span>      return compareTo(other) == 0;<a name="line.149"></a>
-<span class="sourceLineNo">150</span>    }<a name="line.150"></a>
-<span class="sourceLineNo">151</span>    return false;<a name="line.151"></a>
-<span class="sourceLineNo">152</span>  }<a name="line.152"></a>
-<span class="sourceLineNo">153</span><a name="line.153"></a>
-<span class="sourceLineNo">154</span>  /**<a name="line.154"></a>
-<span class="sourceLineNo">155</span>   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.<a name="line.155"></a>
-<span class="sourceLineNo">156</span>   *             No replacement<a name="line.156"></a>
-<span class="sourceLineNo">157</span>   */<a name="line.157"></a>
-<span class="sourceLineNo">158</span>  @Deprecated<a name="line.158"></a>
-<span class="sourceLineNo">159</span>  @Override<a name="line.159"></a>
-<span class="sourceLineNo">160</span>  public int hashCode(){<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    return Arrays.hashCode(row);<a name="line.161"></a>
-<span class="sourceLineNo">162</span>  }<a name="line.162"></a>
-<span class="sourceLineNo">163</span><a name="line.163"></a>
-<span class="sourceLineNo">164</span>  @Override<a name="line.164"></a>
-<span class="sourceLineNo">165</span>  public byte[] getRow() {<a name="line.165"></a>
-<span class="sourceLineNo">166</span>    return row;<a name="line.166"></a>
-<span class="sourceLineNo">167</span>  }<a name="line.167"></a>
-<span class="sourceLineNo">168</span><a name="line.168"></a>
-<span class="sourceLineNo">169</span>  /**<a name="line.169"></a>
-<span class="sourceLineNo">170</span>   * @return An unmodifiable list of the current mutations.<a name="line.170"></a>
-<span class="sourceLineNo">171</span>   */<a name="line.171"></a>
-<span class="sourceLineNo">172</span>  public List&lt;Mutation&gt; getMutations() {<a name="line.172"></a>
-<span class="sourceLineNo">173</span>    return Collections.unmodifiableList(mutations);<a name="line.173"></a>
-<span class="sourceLineNo">174</span>  }<a name="line.174"></a>
-<span class="sourceLineNo">175</span><a name="line.175"></a>
-<span class="sourceLineNo">176</span>  public int getMaxPriority() {<a name="line.176"></a>
-<span class="sourceLineNo">177</span>    int maxPriority = Integer.MIN_VALUE;<a name="line.177"></a>
-<span class="sourceLineNo">178</span>    for (Mutation mutation : mutations) {<a name="line.178"></a>
-<span class="sourceLineNo">179</span>      maxPriority = Math.max(maxPriority, mutation.getPriority());<a name="line.179"></a>
-<span class="sourceLineNo">180</span>    }<a name="line.180"></a>
-<span class="sourceLineNo">181</span>    return maxPriority;<a name="line.181"></a>
-<span class="sourceLineNo">182</span>  }<a name="line.182"></a>
-<span class="sourceLineNo">183</span>}<a name="line.183"></a>
+<span class="sourceLineNo">026</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.26"></a>
+<span class="sourceLineNo">027</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.27"></a>
+<span class="sourceLineNo">028</span><a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;<a name="line.29"></a>
+<span class="sourceLineNo">030</span><a name="line.30"></a>
+<span class="sourceLineNo">031</span>/**<a name="line.31"></a>
+<span class="sourceLineNo">032</span> * Performs multiple mutations atomically on a single row.<a name="line.32"></a>
+<span class="sourceLineNo">033</span> * Currently {@link Put} and {@link Delete} are supported.<a name="line.33"></a>
+<span class="sourceLineNo">034</span> *<a name="line.34"></a>
+<span class="sourceLineNo">035</span> * The mutations are performed in the order in which they<a name="line.35"></a>
+<span class="sourceLineNo">036</span> * were added.<a name="line.36"></a>
+<span class="sourceLineNo">037</span> *<a name="line.37"></a>
+<span class="sourceLineNo">038</span> * &lt;p&gt;We compare and equate mutations based on their row, so be careful when putting<a name="line.38"></a>
+<span class="sourceLineNo">039</span> * RowMutations into Sets or using them as keys in Maps.<a name="line.39"></a>
+<span class="sourceLineNo">040</span> */<a name="line.40"></a>
+<span class="sourceLineNo">041</span>@InterfaceAudience.Public<a name="line.41"></a>
+<span class="sourceLineNo">042</span>public class RowMutations implements Row {<a name="line.42"></a>
+<span class="sourceLineNo">043</span><a name="line.43"></a>
+<span class="sourceLineNo">044</span>  /**<a name="line.44"></a>
+<span class="sourceLineNo">045</span>   * Create a {@link RowMutations} with the specified mutations.<a name="line.45"></a>
+<span class="sourceLineNo">046</span>   * @param mutations the mutations to send<a name="line.46"></a>
+<span class="sourceLineNo">047</span>   * @return a RowMutations carrying the given mutations<a name="line.47"></a>
+<span class="sourceLineNo">048</span>   * @throws IOException if any mutation's row differs from the others<a name="line.48"></a>
+<span class="sourceLineNo">049</span>   */<a name="line.49"></a>
+<span class="sourceLineNo">050</span>  public static RowMutations of(List&lt;? extends Mutation&gt; mutations) throws IOException {<a name="line.50"></a>
+<span class="sourceLineNo">051</span>    if (CollectionUtils.isEmpty(mutations)) {<a name="line.51"></a>
+<span class="sourceLineNo">052</span>      throw new IllegalArgumentException("Cannot instantiate a RowMutations from an empty list");<a name="line.52"></a>
+<span class="sourceLineNo">053</span>    }<a name="line.53"></a>
+<span class="sourceLineNo">054</span>    return new RowMutations(mutations.get(0).getRow(), mutations.size())<a name="line.54"></a>
+<span class="sourceLineNo">055</span>        .add(mutations);<a name="line.55"></a>
+<span class="sourceLineNo">056</span>  }<a name="line.56"></a>
+<span class="sourceLineNo">057</span><a name="line.57"></a>
+<span class="sourceLineNo">058</span>  private final List&lt;Mutation&gt; mutations;<a name="line.58"></a>
+<span class="sourceLineNo">059</span>  private final byte [] row;<a name="line.59"></a>
+<span class="sourceLineNo">060</span><a name="line.60"></a>
+<span class="sourceLineNo">061</span>  public RowMutations(byte [] row) {<a name="line.61"></a>
+<span class="sourceLineNo">062</span>    this(row, -1);<a name="line.62"></a>
+<span class="sourceLineNo">063</span>  }<a name="line.63"></a>
+<span class="sourceLineNo">064</span>  /**<a name="line.64"></a>
+<span class="sourceLineNo">065</span>   * Create an atomic mutation for the specified row.<a name="line.65"></a>
+<span class="sourceLineNo">066</span>   * @param row row key<a name="line.66"></a>
+<span class="sourceLineNo">067</span>   * @param initialCapacity the initial capacity of the RowMutations<a name="line.67"></a>
+<span class="sourceLineNo">068</span>   */<a name="line.68"></a>
+<span class="sourceLineNo">069</span>  public RowMutations(byte [] row, int initialCapacity) {<a name="line.69"></a>
+<span class="sourceLineNo">070</span>    this.row = Bytes.copy(Mutation.checkRow(row));<a name="line.70"></a>
+<span class="sourceLineNo">071</span>    if (initialCapacity &lt;= 0) {<a name="line.71"></a>
+<span class="sourceLineNo">072</span>      this.mutations = new ArrayList&lt;&gt;();<a name="line.72"></a>
+<span class="sourceLineNo">073</span>    } else {<a name="line.73"></a>
+<span class="sourceLineNo">074</span>      this.mutations = new ArrayList&lt;&gt;(initialCapacity);<a name="line.74"></a>
+<span class="sourceLineNo">075</span>    }<a name="line.75"></a>
+<span class="sourceLineNo">076</span>  }<a name="line.76"></a>
+<span class="sourceLineNo">077</span><a name="line.77"></a>
+<span class="sourceLineNo">078</span>  /**<a name="line.78"></a>
+<span class="sourceLineNo">079</span>   * Add a {@link Put} operation to the list of mutations<a name="line.79"></a>
+<span class="sourceLineNo">080</span>   * @param p The {@link Put} to add<a name="line.80"></a>
+<span class="sourceLineNo">081</span>   * @throws IOException if the row of the added mutation doesn't match the original row<a name="line.81"></a>
+<span class="sourceLineNo">082</span>   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.<a name="line.82"></a>
+<span class="sourceLineNo">083</span>   *             Use {@link #add(Mutation)} instead.<a name="line.83"></a>
+<span class="sourceLineNo">084</span>   */<a name="line.84"></a>
+<span class="sourceLineNo">085</span>  @Deprecated<a name="line.85"></a>
+<span class="sourceLineNo">086</span>  public void add(Put p) throws IOException {<a name="line.86"></a>
+<span class="sourceLineNo">087</span>    add((Mutation) p);<a name="line.87"></a>
+<span class="sourceLineNo">088</span>  }<a name="line.88"></a>
+<span class="sourceLineNo">089</span><a name="line.89"></a>
+<span class="sourceLineNo">090</span>  /**<a name="line.90"></a>
+<span class="sourceLineNo">091</span>   * Add a {@link Delete} operation to the list of mutations<a name="line.91"></a>
+<span class="sourceLineNo">092</span>   * @param d The {@link Delete} to add<a name="line.92"></a>
+<span class="sourceLineNo">093</span>   * @throws IOException if the row of the added mutation doesn't match the original row<a name="line.93"></a>
+<span class="sourceLineNo">094</span>   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.<a name="line.94"></a>
+<span class="sourceLineNo">095</span>   *             Use {@link #add(Mutation)} instead.<a name="line.95"></a>
+<span class="sourceLineNo">096</span>   */<a name="line.96"></a>
+<span class="sourceLineNo">097</span>  @Deprecated<a name="line.97"></a>
+<span class="sourceLineNo">098</span>  public void add(Delete d) throws IOException {<a name="line.98"></a>
+<span class="sourceLineNo">099</span>    add((Mutation) d);<a name="line.99"></a>
+<span class="sourceLineNo">100</span>  }<a name="line.100"></a>
+<span class="sourceLineNo">101</span><a name="line.101"></a>
+<span class="sourceLineNo">102</span>  /**<a name="line.102"></a>
+<span class="sourceLineNo">103</span>   * Currently only supports {@link Put} and {@link Delete} mutations.<a name="line.103"></a>
+<span class="sourceLineNo">104</span>   *<a name="line.104"></a>
+<span class="sourceLineNo">105</span>   * @param mutation The data to send.<a name="line.105"></a>
+<span class="sourceLineNo">106</span>   * @throws IOException if the row of the added mutation doesn't match the original row<a name="line.106"></a>
+<span class="sourceLineNo">107</span>   */<a name="line.107"></a>
+<span class="sourceLineNo">108</span>  public RowMutations add(Mutation mutation) throws IOException {<a name="line.108"></a>
+<span class="sourceLineNo">109</span>    return add(Collections.singletonList(mutation));<a name="line.109"></a>
+<span class="sourceLineNo">110</span>  }<a name="line.110"></a>
+<span class="sourceLineNo">111</span><a name="line.111"></a>
+<span class="sourceLineNo">112</span>  /**<a name="line.112"></a>
+<span class="sourceLineNo">113</span>   * Currently only supports {@link Put} and {@link Delete} mutations.<a name="line.113"></a>
+<span class="sourceLineNo">114</span>   *<a name="line.114"></a>
+<span class="sourceLineNo">115</span>   * @param mutations The data to send.<a name="line.115"></a>
+<span class="sourceLineNo">116</span>   * @throws IOException if the row of the added mutation doesn't match the original row<a name="line.116"></a>
+<span class="sourceLineNo">117</span>   */<a name="line.117"></a>
+<span class="sourceLineNo">118</span>  public RowMutations add(List&lt;? extends Mutation&gt; mutations) throws IOException {<a name="line.118"></a>
+<span class="sourceLineNo">119</span>    for (Mutation mutation : mutations) {<a name="line.119"></a>
+<span class="sourceLineNo">120</span>      if (!Bytes.equals(row, mutation.getRow())) {<a name="line.120"></a>
+<span class="sourceLineNo">121</span>        throw new WrongRowIOException("The row in the recently added Put/Delete &lt;" +<a name="line.121"></a>
+<span class="sourceLineNo">122</span>          Bytes.toStringBinary(mutation.getRow()) + "&gt; doesn't match the original one &lt;" +<a name="line.122"></a>
+<span class="sourceLineNo">123</span>          Bytes.toStringBinary(this.row) + "&gt;");<a name="line.123"></a>
+<span class="sourceLineNo">124</span>      }<a name="line.124"></a>
+<span class="sourceLineNo">125</span>    }<a name="line.125"></a>
+<span class="sourceLineNo">126</span>    this.mutations.addAll(mutations);<a name="line.126"></a>
+<span class="sourceLineNo">127</span>    return this;<a name="line.127"></a>
+<span class="sourceLineNo">128</span>  }<a name="line.128"></a>
+<span class="sourceLineNo">129</span><a name="line.129"></a>
+<span class="sourceLineNo">130</span>  /**<a name="line.130"></a>
+<span class="sourceLineNo">131</span>   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.<a name="line.131"></a>
+<span class="sourceLineNo">132</span>   *             Use {@link Row#COMPARATOR} instead<a name="line.132"></a>
+<span class="sourceLineNo">133</span>   */<a name="line.133"></a>
+<span class="sourceLineNo">134</span>  @Deprecated<a name="line.134"></a>
+<span class="sourceLineNo">135</span>  @Override<a name="line.135"></a>
+<span class="sourceLineNo">136</span>  public int compareTo(Row i) {<a name="line.136"></a>
+<span class="sourceLineNo">137</span>    return Bytes.compareTo(this.getRow(), i.getRow());<a name="line.137"></a>
+<span class="sourceLineNo">138</span>  }<a name="line.138"></a>
+<span class="sourceLineNo">139</span><a name="line.139"></a>
+<span class="sourceLineNo">140</span>  /**<a name="line.140"></a>
+<span class="sourceLineNo">141</span>   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.<a name="line.141"></a>
+<span class="sourceLineNo">142</span>   *             No replacement<a name="line.142"></a>
+<span class="sourceLineNo">143</span>   */<a name="line.143"></a>
+<span class="sourceLineNo">144</span>  @Deprecated<a name="line.144"></a>
+<span class="sourceLineNo">145</span>  @Override<a name="line.145"></a>
+<span class="sourceLineNo">146</span>  public boolean equals(Object obj) {<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    if (obj == this) return true;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>    if (obj instanceof RowMutations) {<a name="line.148"></a>
+<span class="sourceLineNo">149</span>      RowMutations other = (RowMutations)obj;<a name="line.149"></a>
+<span class="sourceLineNo">150</span>      return compareTo(other) == 0;<a name="line.150"></a>
+<span class="sourceLineNo">151</span>    }<a name="line.151"></a>
+<span class="sourceLineNo">152</span>    return false;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>  }<a name="line.153"></a>
+<span class="sourceLineNo">154</span><a name="line.154"></a>
+<span class="sourceLineNo">155</span>  /**<a name="line.155"></a>
+<span class="sourceLineNo">156</span>   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.<a name="line.156"></a>
+<span class="sourceLineNo">157</span>   *             No replacement<a name="line.157"></a>
+<span class="sourceLineNo">158</span>   */<a name="line.158"></a>
+<span class="sourceLineNo">159</span>  @Deprecated<a name="line.159"></a>
+<span class="sourceLineNo">160</span>  @Override<a name="line.160"></a>
+<span class="sourceLineNo">161</span>  public int hashCode(){<a name="line.161"></a>
+<span class="sourceLineNo">162</span>    return Arrays.hashCode(row);<a name="line.162"></a>
+<span class="sourceLineNo">163</span>  }<a name="line.163"></a>
+<span class="sourceLineNo">164</span><a name="line.164"></a>
+<span class="sourceLineNo">165</span>  @Override<a name="line.165"></a>
+<span class="sourceLineNo">166</span>  public byte[] getRow() {<a name="line.166"></a>
+<span class="sourceLineNo">167</span>    return row;<a name="line.167"></a>
+<span class="sourceLineNo">168</span>  }<a name="line.168"></a>
+<span class="sourceLineNo">169</span><a name="line.169"></a>
+<span class="sourceLineNo">170</span>  /**<a name="line.170"></a>
+<span class="sourceLineNo">171</span>   * @return An unmodifiable list of the current mutations.<a name="line.171"></a>
+<span class="sourceLineNo">172</span>   */<a name="line.172"></a>
+<span class="sourceLineNo">173</span>  public List&lt;Mutation&gt; getMutations() {<a name="line.173"></a>
+<span class="sourceLineNo">174</span>    return Collections.unmodifiableList(mutations);<a name="line.174"></a>
+<span class="sourceLineNo">175</span>  }<a name="line.175"></a>
+<span class="sourceLineNo">176</span><a name="line.176"></a>
+<span class="sourceLineNo">177</span>  public int getMaxPriority() {<a name="line.177"></a>
+<span class="sourceLineNo">178</span>    int maxPriority = Integer.MIN_VALUE;<a name="line.178"></a>
+<span class="sourceLineNo">179</span>    for (Mutation mutation : mutations) {<a name="line.179"></a>
+<span class="sourceLineNo">180</span>      maxPriority = Math.max(maxPriority, mutation.getPriority());<a name="line.180"></a>
+<span class="sourceLineNo">181</span>    }<a name="line.181"></a>
+<span class="sourceLineNo">182</span>    return maxPriority;<a name="line.182"></a>
+<span class="sourceLineNo">183</span>  }<a name="line.183"></a>
+<span class="sourceLineNo">184</span>}<a name="line.184"></a>
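
For context, a minimal usage sketch of the RowMutations API added above, assuming an already-open Table; the table, family, and qualifier names here are illustrative, not from this commit:

import java.io.IOException;
import java.util.Arrays;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

// Apply a Put and a Delete to the same row as one atomic unit.
static void atomicPutAndDelete(Table table) throws IOException {
  byte[] row = Bytes.toBytes("row-1");          // every mutation must carry this row
  Put put = new Put(row);
  put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("a"), Bytes.toBytes("v"));
  Delete delete = new Delete(row);
  delete.addColumns(Bytes.toBytes("cf"), Bytes.toBytes("b"));

  // of() takes the row from the first mutation; add() rejects any mutation
  // whose row differs, throwing WrongRowIOException.
  RowMutations rm = RowMutations.of(Arrays.asList(put, delete));
  table.mutateRow(rm);  // executed atomically, in insertion order
}

Since compareTo/equals/hashCode are deprecated here, ordering a collection of RowMutations is better done explicitly, e.g. mutationsList.sort(Row.COMPARATOR).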
 
 
 


[22/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html b/devapidocs/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html
index 7b4bdcd..a5b07e4 100644
--- a/devapidocs/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html
+++ b/devapidocs/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html
@@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab";
 <hr>
 <br>
 <pre>@InterfaceAudience.Private
-public class <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html#line.40">RegionSizeReportingChore</a>
+public class <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html#line.41">RegionSizeReportingChore</a>
 extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" title="class in org.apache.hadoop.hbase">ScheduledChore</a></pre>
 <div class="block">A Chore which sends the region size reports on this RegionServer to the Master.</div>
 </li>
@@ -141,7 +141,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <th class="colLast" scope="col">Field and Description</th>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code>private static org.apache.commons.logging.Log</code></td>
+<td class="colFirst"><code>private static org.slf4j.Logger</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html#LOG">LOG</a></span></code>&nbsp;</td>
 </tr>
 <tr class="rowColor">
@@ -280,7 +280,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>LOG</h4>
-<pre>private static final&nbsp;org.apache.commons.logging.Log <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html#line.41">LOG</a></pre>
+<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html#line.42">LOG</a></pre>
 </li>
 </ul>
 <a name="REGION_SIZE_REPORTING_CHORE_PERIOD_KEY">
@@ -289,7 +289,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>REGION_SIZE_REPORTING_CHORE_PERIOD_KEY</h4>
-<pre>static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html#line.43">REGION_SIZE_REPORTING_CHORE_PERIOD_KEY</a></pre>
+<pre>static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html#line.44">REGION_SIZE_REPORTING_CHORE_PERIOD_KEY</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.quotas.RegionSizeReportingChore.REGION_SIZE_REPORTING_CHORE_PERIOD_KEY">Constant Field Values</a></dd>
@@ -302,7 +302,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>REGION_SIZE_REPORTING_CHORE_PERIOD_DEFAULT</h4>
-<pre>static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html#line.45">REGION_SIZE_REPORTING_CHORE_PERIOD_DEFAULT</a></pre>
+<pre>static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html#line.46">REGION_SIZE_REPORTING_CHORE_PERIOD_DEFAULT</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.quotas.RegionSizeReportingChore.REGION_SIZE_REPORTING_CHORE_PERIOD_DEFAULT">Constant Field Values</a></dd>
@@ -315,7 +315,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>REGION_SIZE_REPORTING_CHORE_DELAY_KEY</h4>
-<pre>static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html#line.47">REGION_SIZE_REPORTING_CHORE_DELAY_KEY</a></pre>
+<pre>static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html#line.48">REGION_SIZE_REPORTING_CHORE_DELAY_KEY</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.quotas.RegionSizeReportingChore.REGION_SIZE_REPORTING_CHORE_DELAY_KEY">Constant Field Values</a></dd>
@@ -328,7 +328,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>REGION_SIZE_REPORTING_CHORE_DELAY_DEFAULT</h4>
-<pre>static final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html#line.49">REGION_SIZE_REPORTING_CHORE_DELAY_DEFAULT</a></pre>
+<pre>static final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html#line.50">REGION_SIZE_REPORTING_CHORE_DELAY_DEFAULT</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.quotas.RegionSizeReportingChore.REGION_SIZE_REPORTING_CHORE_DELAY_DEFAULT">Constant Field Values</a></dd>
@@ -341,7 +341,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>REGION_SIZE_REPORTING_CHORE_TIMEUNIT_KEY</h4>
-<pre>static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html#line.51">REGION_SIZE_REPORTING_CHORE_TIMEUNIT_KEY</a></pre>
+<pre>static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html#line.52">REGION_SIZE_REPORTING_CHORE_TIMEUNIT_KEY</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.quotas.RegionSizeReportingChore.REGION_SIZE_REPORTING_CHORE_TIMEUNIT_KEY">Constant Field Values</a></dd>
@@ -354,7 +354,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>REGION_SIZE_REPORTING_CHORE_TIMEUNIT_DEFAULT</h4>
-<pre>static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html#line.53">REGION_SIZE_REPORTING_CHORE_TIMEUNIT_DEFAULT</a></pre>
+<pre>static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html#line.54">REGION_SIZE_REPORTING_CHORE_TIMEUNIT_DEFAULT</a></pre>
 </li>
 </ul>
 <a name="rsServices">
@@ -363,7 +363,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>rsServices</h4>
-<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.html" title="interface in org.apache.hadoop.hbase.regionserver">RegionServerServices</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html#line.55">rsServices</a></pre>
+<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.html" title="interface in org.apache.hadoop.hbase.regionserver">RegionServerServices</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html#line.56">rsServices</a></pre>
 </li>
 </ul>
 <a name="metrics">
@@ -372,7 +372,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockListLast">
 <li class="blockList">
 <h4>metrics</h4>
-<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">MetricsRegionServer</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html#line.56">metrics</a></pre>
+<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">MetricsRegionServer</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html#line.57">metrics</a></pre>
 </li>
 </ul>
 </li>
@@ -389,7 +389,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockListLast">
 <li class="blockList">
 <h4>RegionSizeReportingChore</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html#line.58">RegionSizeReportingChore</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.html" title="interface in org.apache.hadoop.hbase.regionserver">RegionServerServices</a>&nbsp;rsServices)</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html#line.59">RegionSizeReportingChore</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.html" title="interface in org.apache.hadoop.hbase.regionserver">RegionServerServices</a>&nbsp;rsServices)</pre>
 </li>
 </ul>
 </li>
@@ -406,7 +406,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>chore</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html#line.68">chore</a>()</pre>
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html#line.69">chore</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html#chore--">ScheduledChore</a></code></span></div>
 <div class="block">The task to execute on each scheduled execution of the Chore</div>
 <dl>
@@ -421,7 +421,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>_chore</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html#line.80">_chore</a>()</pre>
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html#line.81">_chore</a>()</pre>
 </li>
 </ul>
 <a name="getOnlineRegionInfos-java.util.List-">
@@ -430,7 +430,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>getOnlineRegionInfos</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/HashSet.html?is-external=true" title="class or interface in java.util">HashSet</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html#line.91">getOnlineRegionInfos</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;? extends <a href="../../../../../org/apache/hadoop/hbase/regionserver/Region.html" title="interface in org.apache.hadoop.hbase.regionserver">Region</a>&gt;&nbsp;onlineRegions)</pre>
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/HashSet.html?is-external=true" title="class or interface in java.util">HashSet</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html#line.92">getOnlineRegionInfos</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;? extends <a href="../../../../../org/apache/hadoop/hbase/regionserver/Region.html" title="interface in org.apache.hadoop.hbase.regionserver">Region</a>&gt;&nbsp;onlineRegions)</pre>
 </li>
 </ul>
 <a name="removeNonOnlineRegions-org.apache.hadoop.hbase.quotas.RegionSizeStore-java.util.Set-">
@@ -439,7 +439,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>removeNonOnlineRegions</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html#line.97">removeNonOnlineRegions</a>(<a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStore.html" title="interface in org.apache.hadoop.hbase.quotas">RegionSizeStore</a>&nbsp;store,
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html#line.98">removeNonOnlineRegions</a>(<a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStore.html" title="interface in org.apache.hadoop.hbase.quotas">RegionSizeStore</a>&nbsp;store,
                             <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true" title="class or interface in java.util">Set</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&gt;&nbsp;onlineRegions)</pre>
 </li>
 </ul>
@@ -449,7 +449,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>getPeriod</h4>
-<pre>static&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html#line.128">getPeriod</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
+<pre>static&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html#line.129">getPeriod</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 <div class="block">Extracts the period for the chore from the configuration.</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -465,7 +465,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>getInitialDelay</h4>
-<pre>static&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html#line.139">getInitialDelay</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
+<pre>static&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html#line.140">getInitialDelay</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 <div class="block">Extracts the initial delay for the chore from the configuration.</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -481,7 +481,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getTimeUnit</h4>
-<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/TimeUnit.html?is-external=true" title="class or interface in java.util.concurrent">TimeUnit</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html#line.152">getTimeUnit</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
+<pre>static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/TimeUnit.html?is-external=true" title="class or interface in java.util.concurrent">TimeUnit</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html#line.153">getTimeUnit</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 <div class="block">Extracts the time unit for the chore period and initial delay from the configuration. The
  configuration value for <a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html#REGION_SIZE_REPORTING_CHORE_TIMEUNIT_KEY"><code>REGION_SIZE_REPORTING_CHORE_TIMEUNIT_KEY</code></a> must correspond to a
  <a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/TimeUnit.html?is-external=true" title="class or interface in java.util.concurrent"><code>TimeUnit</code></a> value.</div>
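
A sketch of the lookup pattern that getPeriod/getInitialDelay/getTimeUnit document above; the key and default strings below are placeholders, not the real constants (those are in constant-values.html):

import java.util.concurrent.TimeUnit;
import org.apache.hadoop.conf.Configuration;

// The configured string must name a java.util.concurrent.TimeUnit constant.
static TimeUnit timeUnitFromConf(Configuration conf) {
  String name = conf.get("example.chore.timeunit.key", "MILLISECONDS");  // hypothetical key
  return TimeUnit.valueOf(name);  // throws IllegalArgumentException for a bad name
}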

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html b/devapidocs/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html
index 8535053..2176d16 100644
--- a/devapidocs/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 <hr>
 <br>
 <pre>@InterfaceAudience.Private
-public class <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html#line.36">RegionSizeStoreImpl</a>
+public class <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html#line.37">RegionSizeStoreImpl</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStore.html" title="interface in org.apache.hadoop.hbase.quotas">RegionSizeStore</a></pre>
 <div class="block">A <a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStore.html" title="interface in org.apache.hadoop.hbase.quotas"><code>RegionSizeStore</code></a> implementation backed by a ConcurrentHashMap. We expected similar
@@ -139,7 +139,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStor
 <th class="colLast" scope="col">Field and Description</th>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code>private static org.apache.commons.logging.Log</code></td>
+<td class="colFirst"><code>private static org.slf4j.Logger</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html#LOG">LOG</a></span></code>&nbsp;</td>
 </tr>
 <tr class="rowColor">
@@ -269,7 +269,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStor
 <ul class="blockList">
 <li class="blockList">
 <h4>LOG</h4>
-<pre>private static final&nbsp;org.apache.commons.logging.Log <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html#line.37">LOG</a></pre>
+<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html#line.38">LOG</a></pre>
 </li>
 </ul>
 <a name="sizeOfEntry">
@@ -278,7 +278,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStor
 <ul class="blockList">
 <li class="blockList">
 <h4>sizeOfEntry</h4>
-<pre>private static final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html#line.38">sizeOfEntry</a></pre>
+<pre>private static final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html#line.39">sizeOfEntry</a></pre>
 </li>
 </ul>
 <a name="store">
@@ -287,7 +287,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStor
 <ul class="blockListLast">
 <li class="blockList">
 <h4>store</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentHashMap.html?is-external=true" title="class or interface in java.util.concurrent">ConcurrentHashMap</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>,<a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSize.html" title="interface in org.apache.hadoop.hbase.quotas">RegionSize</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html#line.43">store</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentHashMap.html?is-external=true" title="class or interface in java.util.concurrent">ConcurrentHashMap</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>,<a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSize.html" title="interface in org.apache.hadoop.hbase.quotas">RegionSize</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html#line.44">store</a></pre>
 </li>
 </ul>
 </li>
@@ -304,7 +304,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStor
 <ul class="blockListLast">
 <li class="blockList">
 <h4>RegionSizeStoreImpl</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html#line.45">RegionSizeStoreImpl</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html#line.46">RegionSizeStoreImpl</a>()</pre>
 </li>
 </ul>
 </li>
@@ -321,7 +321,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStor
 <ul class="blockList">
 <li class="blockList">
 <h4>iterator</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true" title="class or interface in java.util">Iterator</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.Entry.html?is-external=true" title="class or interface in java.util">Map.Entry</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>,<a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSize.html" title="interface in org.apache.hadoop.hbase.quotas">RegionSize</a>&gt;&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html#line.50">iterator</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true" title="class or interface in java.util">Iterator</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.Entry.html?is-external=true" title="class or interface in java.util">Map.Entry</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>,<a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSize.html" title="interface in org.apache.hadoop.hbase.quotas">RegionSize</a>&gt;&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html#line.51">iterator</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Iterable.html?is-external=true#iterator--" title="class or interface in java.lang">iterator</a></code>&nbsp;in interface&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Iterable.html?is-external=true" title="class or interface in java.lang">Iterable</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.Entry.html?is-external=true" title="class or interface in java.util">Map.Entry</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>,<a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSize.html" title="interface in org.apache.hadoop.hbase.quotas">RegionSize</a>&gt;&gt;</code></dd>
@@ -334,7 +334,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStor
 <ul class="blockList">
 <li class="blockList">
 <h4>getRegionSize</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSize.html" title="interface in org.apache.hadoop.hbase.quotas">RegionSize</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html#line.55">getRegionSize</a>(<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;regionInfo)</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSize.html" title="interface in org.apache.hadoop.hbase.quotas">RegionSize</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html#line.56">getRegionSize</a>(<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;regionInfo)</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStore.html#getRegionSize-org.apache.hadoop.hbase.client.RegionInfo-">RegionSizeStore</a></code></span></div>
 <div class="block">Returns the size for the give region if one exists. If no size exists, <code>null</code> is
  returned.</div>
@@ -354,7 +354,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStor
 <ul class="blockList">
 <li class="blockList">
 <h4>put</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html#line.60">put</a>(<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;regionInfo,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html#line.61">put</a>(<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;regionInfo,
                 long&nbsp;size)</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStore.html#put-org.apache.hadoop.hbase.client.RegionInfo-long-">RegionSizeStore</a></code></span></div>
 <div class="block">Atomically sets the given <code>size</code> for a region.</div>
@@ -373,7 +373,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStor
 <ul class="blockList">
 <li class="blockList">
 <h4>incrementRegionSize</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html#line.70">incrementRegionSize</a>(<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;regionInfo,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html#line.71">incrementRegionSize</a>(<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;regionInfo,
                                 long&nbsp;delta)</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStore.html#incrementRegionSize-org.apache.hadoop.hbase.client.RegionInfo-long-">RegionSizeStore</a></code></span></div>
 <div class="block">Atomically alter the size of a region.</div>
@@ -392,7 +392,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStor
 <ul class="blockList">
 <li class="blockList">
 <h4>remove</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSize.html" title="interface in org.apache.hadoop.hbase.quotas">RegionSize</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html#line.80">remove</a>(<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;regionInfo)</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSize.html" title="interface in org.apache.hadoop.hbase.quotas">RegionSize</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html#line.81">remove</a>(<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;regionInfo)</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStore.html#remove-org.apache.hadoop.hbase.client.RegionInfo-">RegionSizeStore</a></code></span></div>
 <div class="block">Removes the mapping for the given key, returning the value if one exists in the store.</div>
 <dl>
@@ -411,7 +411,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStor
 <ul class="blockList">
 <li class="blockList">
 <h4>heapSize</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html#line.85">heapSize</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html#line.86">heapSize</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/io/HeapSize.html#heapSize--">heapSize</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/io/HeapSize.html" title="interface in org.apache.hadoop.hbase.io">HeapSize</a></code></dd>
@@ -427,7 +427,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStor
 <ul class="blockList">
 <li class="blockList">
 <h4>size</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html#line.92">size</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html#line.93">size</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStore.html#size--">RegionSizeStore</a></code></span></div>
 <div class="block">Returns the number of entries in the store.</div>
 <dl>
@@ -444,7 +444,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStor
 <ul class="blockList">
 <li class="blockList">
 <h4>isEmpty</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html#line.97">isEmpty</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html#line.98">isEmpty</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStore.html#isEmpty--">RegionSizeStore</a></code></span></div>
 <div class="block">Returns if the store is empty.</div>
 <dl>
@@ -461,7 +461,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStor
 <ul class="blockListLast">
 <li class="blockList">
 <h4>clear</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html#line.102">clear</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.html#line.103">clear</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStore.html#clear--">RegionSizeStore</a></code></span></div>
 <div class="block">Removes all entries from the store.</div>
 <dl>
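
The class description above says the store is backed by a ConcurrentHashMap; a minimal sketch of that pattern follows, simplified to String keys and long sizes (the real class maps RegionInfo to RegionSize and also reports heap size), not the actual HBase source:

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;

final class SimpleRegionSizeStore {
  private final ConcurrentHashMap<String, AtomicLong> store = new ConcurrentHashMap<>();

  void put(String region, long size) {
    store.computeIfAbsent(region, k -> new AtomicLong()).set(size);       // atomically set
  }

  void incrementRegionSize(String region, long delta) {
    store.computeIfAbsent(region, k -> new AtomicLong()).addAndGet(delta); // atomically alter
  }

  Long remove(String region) {
    AtomicLong prev = store.remove(region);                                // null if absent
    return prev == null ? null : prev.get();
  }
}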

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
index 6a8ab7e..4e25e3e 100644
--- a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
@@ -229,13 +229,13 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
+<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/SpaceViolationPolicy.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">SpaceViolationPolicy</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/OperationQuota.OperationType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">OperationQuota.OperationType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/ThrottleType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">ThrottleType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/QuotaScope.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">QuotaScope</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/RpcThrottlingException.Type.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">RpcThrottlingException.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/ThrottlingException.Type.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">ThrottlingException.Type</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/SpaceViolationPolicy.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">SpaceViolationPolicy</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/QuotaType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">QuotaType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/OperationQuota.OperationType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">OperationQuota.OperationType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/ThrottleType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">ThrottleType</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html
index 3de1f41..fc611cc 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html
@@ -260,7 +260,7 @@ the order they are declared.</div>
 <ul class="blockList">
 <li class="blockList">
 <h4>values</h4>
-<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html" title="enum in org.apache.hadoop.hbase.regionserver">HRegion.FlushResult.Result</a>[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html#line.578">values</a>()</pre>
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html" title="enum in org.apache.hadoop.hbase.regionserver">HRegion.FlushResult.Result</a>[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html#line.577">values</a>()</pre>
 <div class="block">Returns an array containing the constants of this enum type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -280,7 +280,7 @@ for (HRegion.FlushResult.Result c : HRegion.FlushResult.Result.values())
 <ul class="blockListLast">
 <li class="blockList">
 <h4>valueOf</h4>
-<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html" title="enum in org.apache.hadoop.hbase.regionserver">HRegion.FlushResult.Result</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html#line.578">valueOf</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html" title="enum in org.apache.hadoop.hbase.regionserver">HRegion.FlushResult.Result</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html#line.577">valueOf</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
 <div class="block">Returns the enum constant of this type with the specified name.
 The string must match <i>exactly</i> an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
index 284abae..b438561 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
@@ -704,20 +704,20 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/CompactingMemStore.IndexType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">CompactingMemStore.IndexType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">HRegion.FlushResult.Result</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">MetricsRegionServerSourceFactoryImpl.FactoryStorage</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ChunkCreator.ChunkType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ChunkCreator.ChunkType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TimeRangeTracker.Type.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TimeRangeTracker.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.LimitScope.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScannerContext.LimitScope</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">SplitLogWorker.TaskExecutor.Status</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/BloomType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">BloomType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScanType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">HRegion.FlushResult.Result</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/DefaultHeapMemoryTuner.StepDirection.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">DefaultHeapMemoryTuner.StepDirection</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/CompactingMemStore.IndexType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">CompactingMemStore.IndexType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreCompactionStrategy.Action.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">MemStoreCompactionStrategy.Action</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/BloomType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">BloomType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/Region.Operation.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">Region.Operation</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/FlushType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">FlushType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.NextState.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScannerContext.NextState</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">SplitLogWorker.TaskExecutor.Status</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TimeRangeTracker.Type.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TimeRangeTracker.Type</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">MetricsRegionServerSourceFactoryImpl.FactoryStorage</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/Region.Operation.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">Region.Operation</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ChunkCreator.ChunkType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ChunkCreator.ChunkType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreCompactionStrategy.Action.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">MemStoreCompactionStrategy.Action</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html b/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
index 23060c2..6851ee1 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
@@ -130,9 +130,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/StripeCompactionScanQueryMatcher.DropDeletesInOutput.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">StripeCompactionScanQueryMatcher.DropDeletesInOutput</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/DeleteTracker.DeleteResult.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">DeleteTracker.DeleteResult</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.MatchCode.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">ScanQueryMatcher.MatchCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/StripeCompactionScanQueryMatcher.DropDeletesInOutput.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">StripeCompactionScanQueryMatcher.DropDeletesInOutput</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html b/devapidocs/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html
index 1ffa675..6e15c9f 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 <hr>
 <br>
 <pre>@InterfaceAudience.Private
-public class <a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.62">StoreHotnessProtector</a>
+public class <a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.65">StoreHotnessProtector</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 <div class="block">StoreHotnessProtector is designed to help limit the concurrency of puts with dense columns, it
  does best-effort to avoid exhausting all RS's handlers. When a lot of clients write requests with
@@ -171,7 +171,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#FIXED_SIZE">FIXED_SIZE</a></span></code>&nbsp;</td>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code>private static org.apache.commons.logging.Log</code></td>
+<td class="colFirst"><code>private static org.slf4j.Logger</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#LOG">LOG</a></span></code>&nbsp;</td>
 </tr>
 <tr class="rowColor">
@@ -295,7 +295,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>LOG</h4>
-<pre>private static final&nbsp;org.apache.commons.logging.Log <a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.63">LOG</a></pre>
+<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.66">LOG</a></pre>
 </li>
 </ul>
 <a name="parallelPutToStoreThreadLimit">
@@ -304,7 +304,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>parallelPutToStoreThreadLimit</h4>
-<pre>private volatile&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.64">parallelPutToStoreThreadLimit</a></pre>
+<pre>private volatile&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.67">parallelPutToStoreThreadLimit</a></pre>
 </li>
 </ul>
 <a name="parallelPreparePutToStoreThreadLimit">
@@ -313,7 +313,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>parallelPreparePutToStoreThreadLimit</h4>
-<pre>private volatile&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.66">parallelPreparePutToStoreThreadLimit</a></pre>
+<pre>private volatile&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.69">parallelPreparePutToStoreThreadLimit</a></pre>
 </li>
 </ul>
 <a name="PARALLEL_PUT_STORE_THREADS_LIMIT">
@@ -322,7 +322,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>PARALLEL_PUT_STORE_THREADS_LIMIT</h4>
-<pre>public static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.67">PARALLEL_PUT_STORE_THREADS_LIMIT</a></pre>
+<pre>public static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.70">PARALLEL_PUT_STORE_THREADS_LIMIT</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.regionserver.throttle.StoreHotnessProtector.PARALLEL_PUT_STORE_THREADS_LIMIT">Constant Field Values</a></dd>
@@ -335,7 +335,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>PARALLEL_PREPARE_PUT_STORE_MULTIPLIER</h4>
-<pre>public static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.69">PARALLEL_PREPARE_PUT_STORE_MULTIPLIER</a></pre>
+<pre>public static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.72">PARALLEL_PREPARE_PUT_STORE_MULTIPLIER</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.regionserver.throttle.StoreHotnessProtector.PARALLEL_PREPARE_PUT_STORE_MULTIPLIER">Constant Field Values</a></dd>
@@ -348,7 +348,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_PARALLEL_PUT_STORE_THREADS_LIMIT</h4>
-<pre>private static final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.71">DEFAULT_PARALLEL_PUT_STORE_THREADS_LIMIT</a></pre>
+<pre>private static final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.74">DEFAULT_PARALLEL_PUT_STORE_THREADS_LIMIT</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.regionserver.throttle.StoreHotnessProtector.DEFAULT_PARALLEL_PUT_STORE_THREADS_LIMIT">Constant Field Values</a></dd>
@@ -361,7 +361,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>parallelPutToStoreThreadLimitCheckMinColumnCount</h4>
-<pre>private volatile&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.72">parallelPutToStoreThreadLimitCheckMinColumnCount</a></pre>
+<pre>private volatile&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.75">parallelPutToStoreThreadLimitCheckMinColumnCount</a></pre>
 </li>
 </ul>
 <a name="PARALLEL_PUT_STORE_THREADS_LIMIT_MIN_COLUMN_COUNT">
@@ -370,7 +370,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>PARALLEL_PUT_STORE_THREADS_LIMIT_MIN_COLUMN_COUNT</h4>
-<pre>public static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.73">PARALLEL_PUT_STORE_THREADS_LIMIT_MIN_COLUMN_COUNT</a></pre>
+<pre>public static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.76">PARALLEL_PUT_STORE_THREADS_LIMIT_MIN_COLUMN_COUNT</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.regionserver.throttle.StoreHotnessProtector.PARALLEL_PUT_STORE_THREADS_LIMIT_MIN_COLUMN_COUNT">Constant Field Values</a></dd>
@@ -383,7 +383,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_PARALLEL_PUT_STORE_THREADS_LIMIT_MIN_COLUMN_NUM</h4>
-<pre>private static final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.75">DEFAULT_PARALLEL_PUT_STORE_THREADS_LIMIT_MIN_COLUMN_NUM</a></pre>
+<pre>private static final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.78">DEFAULT_PARALLEL_PUT_STORE_THREADS_LIMIT_MIN_COLUMN_NUM</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.regionserver.throttle.StoreHotnessProtector.DEFAULT_PARALLEL_PUT_STORE_THREADS_LIMIT_MIN_COLUMN_NUM">Constant Field Values</a></dd>
@@ -396,7 +396,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_PARALLEL_PREPARE_PUT_STORE_MULTIPLIER</h4>
-<pre>private static final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.76">DEFAULT_PARALLEL_PREPARE_PUT_STORE_MULTIPLIER</a></pre>
+<pre>private static final&nbsp;int <a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.79">DEFAULT_PARALLEL_PREPARE_PUT_STORE_MULTIPLIER</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../../constant-values.html#org.apache.hadoop.hbase.regionserver.throttle.StoreHotnessProtector.DEFAULT_PARALLEL_PREPARE_PUT_STORE_MULTIPLIER">Constant Field Values</a></dd>
@@ -409,7 +409,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>preparePutToStoreMap</h4>
-<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicInteger</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.78">preparePutToStoreMap</a></pre>
+<pre>private final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicInteger</a>&gt; <a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.81">preparePutToStoreMap</a></pre>
 </li>
 </ul>
 <a name="region">
@@ -418,7 +418,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>region</h4>
-<pre>private final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/Region.html" title="interface in org.apache.hadoop.hbase.regionserver">Region</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.80">region</a></pre>
+<pre>private final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/regionserver/Region.html" title="interface in org.apache.hadoop.hbase.regionserver">Region</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.83">region</a></pre>
 </li>
 </ul>
 <a name="FIXED_SIZE">
@@ -427,7 +427,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>FIXED_SIZE</h4>
-<pre>public static final&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.194">FIXED_SIZE</a></pre>
+<pre>public static final&nbsp;long <a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.197">FIXED_SIZE</a></pre>
 </li>
 </ul>
 </li>
@@ -444,7 +444,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>StoreHotnessProtector</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.82">StoreHotnessProtector</a>(<a href="../../../../../../org/apache/hadoop/hbase/regionserver/Region.html" title="interface in org.apache.hadoop.hbase.regionserver">Region</a>&nbsp;region,
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.85">StoreHotnessProtector</a>(<a href="../../../../../../org/apache/hadoop/hbase/regionserver/Region.html" title="interface in org.apache.hadoop.hbase.regionserver">Region</a>&nbsp;region,
                              org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 </li>
 </ul>
@@ -462,7 +462,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>init</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.87">init</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.90">init</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 </li>
 </ul>
 <a name="update-org.apache.hadoop.conf.Configuration-">
@@ -471,7 +471,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>update</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.98">update</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.101">update</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 </li>
 </ul>
 <a name="start-java.util.Map-">
@@ -480,7 +480,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>start</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.104">start</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&gt;&gt;&nbsp;familyMaps)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.107">start</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&gt;&gt;&nbsp;familyMaps)
            throws <a href="../../../../../../org/apache/hadoop/hbase/RegionTooBusyException.html" title="class in org.apache.hadoop.hbase">RegionTooBusyException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -494,7 +494,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>finish</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.153">finish</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&gt;&gt;&nbsp;familyMaps)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.156">finish</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../../org/apache/hadoop/hbase/Cell.html" title="interface in org.apache.hadoop.hbase">Cell</a>&gt;&gt;&nbsp;familyMaps)</pre>
 </li>
 </ul>
 <a name="toString--">
@@ -503,7 +503,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>toString</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.175">toString</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.178">toString</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--" title="class or interface in java.lang">toString</a></code>&nbsp;in class&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>
@@ -516,7 +516,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>isEnable</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.184">isEnable</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.187">isEnable</a>()</pre>
 </li>
 </ul>
 <a name="getPreparePutToStoreMap--">
@@ -525,7 +525,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getPreparePutToStoreMap</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicInteger</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.190">getPreparePutToStoreMap</a>()</pre>
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;byte[],<a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true" title="class or interface in java.util.concurrent.atomic">AtomicInteger</a>&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html#line.193">getPreparePutToStoreMap</a>()</pre>
 </li>
 </ul>
 </li>

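The StoreHotnessProtector class description in the hunk above explains a start/finish guard around puts with dense columns; based only on the member signatures visible on this page, the call pattern might look like the sketch below. StoreHotnessProtector is @InterfaceAudience.Private, so this is illustrative rather than a supported API, and HotnessGuardSketch/guardedPut are hypothetical names. Note also that the LOG field on this page moves from org.apache.commons.logging.Log to org.slf4j.Logger, i.e. the usual slf4j LoggerFactory.getLogger(...) pattern.

    import java.util.List;
    import java.util.Map;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.RegionTooBusyException;
    import org.apache.hadoop.hbase.regionserver.Region;
    import org.apache.hadoop.hbase.regionserver.throttle.StoreHotnessProtector;

    public class HotnessGuardSketch {
      private final StoreHotnessProtector protector;

      HotnessGuardSketch(Region region, Configuration conf) {
        // The constructor reads the parallelPutToStoreThreadLimit family of
        // settings from the Configuration (see init(conf) above).
        this.protector = new StoreHotnessProtector(region, conf);
      }

      void guardedPut(Map<byte[], List<Cell>> familyMaps) throws RegionTooBusyException {
        // start() throws RegionTooBusyException once the per-store parallel
        // put limit is exceeded, pushing back on clients instead of tying up
        // more region server handlers.
        protector.start(familyMaps);
        try {
          // ... apply the mutation to the region here (hypothetical step) ...
        } finally {
          // finish() must run even on failure so the per-store counters in
          // preparePutToStoreMap are decremented.
          protector.finish(familyMaps);
        }
      }
    }

The design point, per the class description, is best effort: the guard rejects work early with RegionTooBusyException rather than queueing it, so handler threads stay available for other regions.
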
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html b/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
index d02c856..bf7ef69 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
@@ -137,9 +137,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/AccessControlFilter.Strategy.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">AccessControlFilter.Strategy</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/Permission.Action.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">Permission.Action</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/AccessController.OpType.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">AccessController.OpType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/AccessControlFilter.Strategy.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">AccessControlFilter.Strategy</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/security/package-tree.html b/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
index 67b7e3a..6ef281d 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
@@ -191,9 +191,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/AuthMethod.html" title="enum in org.apache.hadoop.hbase.security"><span class="typeNameLink">AuthMethod</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/SaslUtil.QualityOfProtection.html" title="enum in org.apache.hadoop.hbase.security"><span class="typeNameLink">SaslUtil.QualityOfProtection</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/SaslStatus.html" title="enum in org.apache.hadoop.hbase.security"><span class="typeNameLink">SaslStatus</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/AuthMethod.html" title="enum in org.apache.hadoop.hbase.security"><span class="typeNameLink">AuthMethod</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
index cc1c42f..a105f85 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
@@ -532,14 +532,14 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/IdReadWriteLock.ReferenceType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">IdReadWriteLock.ReferenceType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/PrettyPrinter.Unit.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">PrettyPrinter.Unit</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Order.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Order</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/PoolMap.PoolType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">PoolMap.PoolType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">HBaseFsck.ErrorReporter.ERROR_CODE</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/ChecksumType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">ChecksumType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/PrettyPrinter.Unit.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">PrettyPrinter.Unit</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.PureJavaComparer.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Bytes.LexicographicalComparerHolder.PureJavaComparer</span></a> (implements org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" title="interface in org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/IdReadWriteLock.ReferenceType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">IdReadWriteLock.ReferenceType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">HBaseFsck.ErrorReporter.ERROR_CODE</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Bytes.LexicographicalComparerHolder.UnsafeComparer</span></a> (implements org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" title="interface in org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Order.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Order</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/PoolMap.PoolType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">PoolMap.PoolType</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html b/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html
index 9ec5b35..c0e28b4 100644
--- a/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/wal/package-tree.html
@@ -189,8 +189,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.wal.<a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.Providers.html" title="enum in org.apache.hadoop.hbase.wal"><span class="typeNameLink">WALFactory.Providers</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.wal.<a href="../../../../../org/apache/hadoop/hbase/wal/RegionGroupingProvider.Strategies.html" title="enum in org.apache.hadoop.hbase.wal"><span class="typeNameLink">RegionGroupingProvider.Strategies</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.wal.<a href="../../../../../org/apache/hadoop/hbase/wal/WALFactory.Providers.html" title="enum in org.apache.hadoop.hbase.wal"><span class="typeNameLink">WALFactory.Providers</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
index ce68f9b..5aca783 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
@@ -16,11 +16,11 @@
 <span class="sourceLineNo">008</span>@InterfaceAudience.Private<a name="line.8"></a>
 <span class="sourceLineNo">009</span>public class Version {<a name="line.9"></a>
 <span class="sourceLineNo">010</span>  public static final String version = "3.0.0-SNAPSHOT";<a name="line.10"></a>
-<span class="sourceLineNo">011</span>  public static final String revision = "556b22374423ff087c0583d02ae4298d4d4f2e6b";<a name="line.11"></a>
+<span class="sourceLineNo">011</span>  public static final String revision = "914de1141699142bce1486468a742233d9440b23";<a name="line.11"></a>
 <span class="sourceLineNo">012</span>  public static final String user = "jenkins";<a name="line.12"></a>
-<span class="sourceLineNo">013</span>  public static final String date = "Thu Apr 19 14:39:00 UTC 2018";<a name="line.13"></a>
+<span class="sourceLineNo">013</span>  public static final String date = "Fri Apr 20 14:39:14 UTC 2018";<a name="line.13"></a>
 <span class="sourceLineNo">014</span>  public static final String url = "git://jenkins-websites1.apache.org/home/jenkins/jenkins-slave/workspace/hbase_generate_website/hbase";<a name="line.14"></a>
-<span class="sourceLineNo">015</span>  public static final String srcChecksum = "83ef0b63e39df660933d8e09ab06a005";<a name="line.15"></a>
+<span class="sourceLineNo">015</span>  public static final String srcChecksum = "8a7d9057695428a69e4cd5d02ff0686c";<a name="line.15"></a>
 <span class="sourceLineNo">016</span>}<a name="line.16"></a>