Posted to commits@hbase.apache.org by gi...@apache.org on 2018/11/27 14:52:33 UTC

[01/26] hbase-site git commit: Published site at 6f15cecaed2f1f76bfe1880b7c578ed369daa5d5.

Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 3d72455c6 -> dccdd2743


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.html
index 1890d8b..755ceef 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.html
@@ -147,243 +147,244 @@
 <span class="sourceLineNo">139</span><a name="line.139"></a>
 <span class="sourceLineNo">140</span>    fs = FileSystem.get(conf);<a name="line.140"></a>
 <span class="sourceLineNo">141</span><a name="line.141"></a>
-<span class="sourceLineNo">142</span>    cacheConf = new CacheConfig(conf);<a name="line.142"></a>
-<span class="sourceLineNo">143</span>    blockCache = cacheConf.getBlockCache();<a name="line.143"></a>
-<span class="sourceLineNo">144</span>    assertNotNull(blockCache);<a name="line.144"></a>
-<span class="sourceLineNo">145</span>  }<a name="line.145"></a>
-<span class="sourceLineNo">146</span><a name="line.146"></a>
-<span class="sourceLineNo">147</span>  private List&lt;KeyValue&gt; createSortedKeyValues(Random rand, int n) {<a name="line.147"></a>
-<span class="sourceLineNo">148</span>    List&lt;KeyValue&gt; kvList = new ArrayList&lt;&gt;(n);<a name="line.148"></a>
-<span class="sourceLineNo">149</span>    for (int i = 0; i &lt; n; ++i)<a name="line.149"></a>
-<span class="sourceLineNo">150</span>      kvList.add(RandomKeyValueUtil.randomKeyValue(rand));<a name="line.150"></a>
-<span class="sourceLineNo">151</span>    Collections.sort(kvList, CellComparatorImpl.COMPARATOR);<a name="line.151"></a>
-<span class="sourceLineNo">152</span>    return kvList;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>  }<a name="line.153"></a>
-<span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>  @Test<a name="line.155"></a>
-<span class="sourceLineNo">156</span>  public void testCompoundBloomFilter() throws IOException {<a name="line.156"></a>
-<span class="sourceLineNo">157</span>    conf.setBoolean(BloomFilterFactory.IO_STOREFILE_BLOOM_ENABLED, true);<a name="line.157"></a>
-<span class="sourceLineNo">158</span>    for (int t = 0; t &lt; NUM_TESTS; ++t) {<a name="line.158"></a>
-<span class="sourceLineNo">159</span>      conf.setFloat(BloomFilterFactory.IO_STOREFILE_BLOOM_ERROR_RATE,<a name="line.159"></a>
-<span class="sourceLineNo">160</span>          (float) TARGET_ERROR_RATES[t]);<a name="line.160"></a>
-<span class="sourceLineNo">161</span><a name="line.161"></a>
-<span class="sourceLineNo">162</span>      testIdMsg = "in test #" + t + ":";<a name="line.162"></a>
-<span class="sourceLineNo">163</span>      Random generationRand = new Random(GENERATION_SEED);<a name="line.163"></a>
-<span class="sourceLineNo">164</span>      List&lt;KeyValue&gt; kvs = createSortedKeyValues(generationRand, NUM_KV[t]);<a name="line.164"></a>
-<span class="sourceLineNo">165</span>      BloomType bt = BLOOM_TYPES[t];<a name="line.165"></a>
-<span class="sourceLineNo">166</span>      Path sfPath = writeStoreFile(t, bt, kvs);<a name="line.166"></a>
-<span class="sourceLineNo">167</span>      readStoreFile(t, bt, kvs, sfPath);<a name="line.167"></a>
-<span class="sourceLineNo">168</span>    }<a name="line.168"></a>
-<span class="sourceLineNo">169</span>  }<a name="line.169"></a>
-<span class="sourceLineNo">170</span><a name="line.170"></a>
-<span class="sourceLineNo">171</span>  /**<a name="line.171"></a>
-<span class="sourceLineNo">172</span>   * Validates the false positive ratio by computing its z-value and comparing<a name="line.172"></a>
-<span class="sourceLineNo">173</span>   * it to the provided threshold.<a name="line.173"></a>
-<span class="sourceLineNo">174</span>   *<a name="line.174"></a>
-<span class="sourceLineNo">175</span>   * @param falsePosRate experimental positive rate<a name="line.175"></a>
-<span class="sourceLineNo">176</span>   * @param nTrials the number of Bloom filter checks<a name="line.176"></a>
-<span class="sourceLineNo">177</span>   * @param zValueBoundary z-value boundary, positive for an upper bound and<a name="line.177"></a>
-<span class="sourceLineNo">178</span>   *          negative for a lower bound<a name="line.178"></a>
-<span class="sourceLineNo">179</span>   * @param cbf the compound Bloom filter we are using<a name="line.179"></a>
-<span class="sourceLineNo">180</span>   * @param additionalMsg additional message to include in log output and<a name="line.180"></a>
-<span class="sourceLineNo">181</span>   *          assertion failures<a name="line.181"></a>
-<span class="sourceLineNo">182</span>   */<a name="line.182"></a>
-<span class="sourceLineNo">183</span>  private void validateFalsePosRate(double falsePosRate, int nTrials,<a name="line.183"></a>
-<span class="sourceLineNo">184</span>      double zValueBoundary, CompoundBloomFilter cbf, String additionalMsg) {<a name="line.184"></a>
-<span class="sourceLineNo">185</span>    double p = BloomFilterFactory.getErrorRate(conf);<a name="line.185"></a>
-<span class="sourceLineNo">186</span>    double zValue = (falsePosRate - p) / Math.sqrt(p * (1 - p) / nTrials);<a name="line.186"></a>
-<span class="sourceLineNo">187</span><a name="line.187"></a>
-<span class="sourceLineNo">188</span>    String assortedStatsStr = " (targetErrorRate=" + p + ", falsePosRate="<a name="line.188"></a>
-<span class="sourceLineNo">189</span>        + falsePosRate + ", nTrials=" + nTrials + ")";<a name="line.189"></a>
-<span class="sourceLineNo">190</span>    LOG.info("z-value is " + zValue + assortedStatsStr);<a name="line.190"></a>
-<span class="sourceLineNo">191</span><a name="line.191"></a>
-<span class="sourceLineNo">192</span>    boolean isUpperBound = zValueBoundary &gt; 0;<a name="line.192"></a>
-<span class="sourceLineNo">193</span><a name="line.193"></a>
-<span class="sourceLineNo">194</span>    if (isUpperBound &amp;&amp; zValue &gt; zValueBoundary ||<a name="line.194"></a>
-<span class="sourceLineNo">195</span>        !isUpperBound &amp;&amp; zValue &lt; zValueBoundary) {<a name="line.195"></a>
-<span class="sourceLineNo">196</span>      String errorMsg = "False positive rate z-value " + zValue + " is "<a name="line.196"></a>
-<span class="sourceLineNo">197</span>          + (isUpperBound ? "higher" : "lower") + " than " + zValueBoundary<a name="line.197"></a>
-<span class="sourceLineNo">198</span>          + assortedStatsStr + ". Per-chunk stats:\n"<a name="line.198"></a>
-<span class="sourceLineNo">199</span>          + cbf.formatTestingStats();<a name="line.199"></a>
-<span class="sourceLineNo">200</span>      fail(errorMsg + additionalMsg);<a name="line.200"></a>
-<span class="sourceLineNo">201</span>    }<a name="line.201"></a>
-<span class="sourceLineNo">202</span>  }<a name="line.202"></a>
-<span class="sourceLineNo">203</span><a name="line.203"></a>
-<span class="sourceLineNo">204</span>  private void readStoreFile(int t, BloomType bt, List&lt;KeyValue&gt; kvs,<a name="line.204"></a>
-<span class="sourceLineNo">205</span>      Path sfPath) throws IOException {<a name="line.205"></a>
-<span class="sourceLineNo">206</span>    HStoreFile sf = new HStoreFile(fs, sfPath, conf, cacheConf, bt, true);<a name="line.206"></a>
-<span class="sourceLineNo">207</span>    sf.initReader();<a name="line.207"></a>
-<span class="sourceLineNo">208</span>    StoreFileReader r = sf.getReader();<a name="line.208"></a>
-<span class="sourceLineNo">209</span>    final boolean pread = true; // does not really matter<a name="line.209"></a>
-<span class="sourceLineNo">210</span>    StoreFileScanner scanner = r.getStoreFileScanner(true, pread, false, 0, 0, false);<a name="line.210"></a>
-<span class="sourceLineNo">211</span><a name="line.211"></a>
-<span class="sourceLineNo">212</span>    {<a name="line.212"></a>
-<span class="sourceLineNo">213</span>      // Test for false negatives (not allowed).<a name="line.213"></a>
-<span class="sourceLineNo">214</span>      int numChecked = 0;<a name="line.214"></a>
-<span class="sourceLineNo">215</span>      for (KeyValue kv : kvs) {<a name="line.215"></a>
-<span class="sourceLineNo">216</span>        byte[] row = CellUtil.cloneRow(kv);<a name="line.216"></a>
-<span class="sourceLineNo">217</span>        boolean present = isInBloom(scanner, row, CellUtil.cloneQualifier(kv));<a name="line.217"></a>
-<span class="sourceLineNo">218</span>        assertTrue(testIdMsg + " Bloom filter false negative on row "<a name="line.218"></a>
-<span class="sourceLineNo">219</span>            + Bytes.toStringBinary(row) + " after " + numChecked<a name="line.219"></a>
-<span class="sourceLineNo">220</span>            + " successful checks", present);<a name="line.220"></a>
-<span class="sourceLineNo">221</span>        ++numChecked;<a name="line.221"></a>
-<span class="sourceLineNo">222</span>      }<a name="line.222"></a>
-<span class="sourceLineNo">223</span>    }<a name="line.223"></a>
-<span class="sourceLineNo">224</span><a name="line.224"></a>
-<span class="sourceLineNo">225</span>    // Test for false positives (some percentage allowed). We test in two modes:<a name="line.225"></a>
-<span class="sourceLineNo">226</span>    // "fake lookup" which ignores the key distribution, and production mode.<a name="line.226"></a>
-<span class="sourceLineNo">227</span>    for (boolean fakeLookupEnabled : new boolean[] { true, false }) {<a name="line.227"></a>
-<span class="sourceLineNo">228</span>      if (fakeLookupEnabled) {<a name="line.228"></a>
-<span class="sourceLineNo">229</span>        BloomFilterUtil.setRandomGeneratorForTest(new Random(283742987L));<a name="line.229"></a>
-<span class="sourceLineNo">230</span>      }<a name="line.230"></a>
-<span class="sourceLineNo">231</span>      try {<a name="line.231"></a>
-<span class="sourceLineNo">232</span>        String fakeLookupModeStr = ", fake lookup is " + (fakeLookupEnabled ?<a name="line.232"></a>
-<span class="sourceLineNo">233</span>            "enabled" : "disabled");<a name="line.233"></a>
-<span class="sourceLineNo">234</span>        CompoundBloomFilter cbf = (CompoundBloomFilter) r.getGeneralBloomFilter();<a name="line.234"></a>
-<span class="sourceLineNo">235</span>        cbf.enableTestingStats();<a name="line.235"></a>
-<span class="sourceLineNo">236</span>        int numFalsePos = 0;<a name="line.236"></a>
-<span class="sourceLineNo">237</span>        Random rand = new Random(EVALUATION_SEED);<a name="line.237"></a>
-<span class="sourceLineNo">238</span>        int nTrials = NUM_KV[t] * 10;<a name="line.238"></a>
-<span class="sourceLineNo">239</span>        for (int i = 0; i &lt; nTrials; ++i) {<a name="line.239"></a>
-<span class="sourceLineNo">240</span>          byte[] query = RandomKeyValueUtil.randomRowOrQualifier(rand);<a name="line.240"></a>
-<span class="sourceLineNo">241</span>          if (isInBloom(scanner, query, bt, rand)) {<a name="line.241"></a>
-<span class="sourceLineNo">242</span>            numFalsePos += 1;<a name="line.242"></a>
-<span class="sourceLineNo">243</span>          }<a name="line.243"></a>
-<span class="sourceLineNo">244</span>        }<a name="line.244"></a>
-<span class="sourceLineNo">245</span>        double falsePosRate = numFalsePos * 1.0 / nTrials;<a name="line.245"></a>
-<span class="sourceLineNo">246</span>        LOG.debug(String.format(testIdMsg<a name="line.246"></a>
-<span class="sourceLineNo">247</span>            + " False positives: %d out of %d (%f)",<a name="line.247"></a>
-<span class="sourceLineNo">248</span>            numFalsePos, nTrials, falsePosRate) + fakeLookupModeStr);<a name="line.248"></a>
-<span class="sourceLineNo">249</span><a name="line.249"></a>
-<span class="sourceLineNo">250</span>        // Check for obvious Bloom filter crashes.<a name="line.250"></a>
-<span class="sourceLineNo">251</span>        assertTrue("False positive is too high: " + falsePosRate + " (greater "<a name="line.251"></a>
-<span class="sourceLineNo">252</span>            + "than " + TOO_HIGH_ERROR_RATE + ")" + fakeLookupModeStr,<a name="line.252"></a>
-<span class="sourceLineNo">253</span>            falsePosRate &lt; TOO_HIGH_ERROR_RATE);<a name="line.253"></a>
-<span class="sourceLineNo">254</span><a name="line.254"></a>
-<span class="sourceLineNo">255</span>        // Now a more precise check to see if the false positive rate is not<a name="line.255"></a>
-<span class="sourceLineNo">256</span>        // too high. The reason we use a relaxed restriction for the real-world<a name="line.256"></a>
-<span class="sourceLineNo">257</span>        // case as opposed to the "fake lookup" case is that our hash functions<a name="line.257"></a>
-<span class="sourceLineNo">258</span>        // are not completely independent.<a name="line.258"></a>
-<span class="sourceLineNo">259</span><a name="line.259"></a>
-<span class="sourceLineNo">260</span>        double maxZValue = fakeLookupEnabled ? 1.96 : 2.5;<a name="line.260"></a>
-<span class="sourceLineNo">261</span>        validateFalsePosRate(falsePosRate, nTrials, maxZValue, cbf,<a name="line.261"></a>
-<span class="sourceLineNo">262</span>            fakeLookupModeStr);<a name="line.262"></a>
-<span class="sourceLineNo">263</span><a name="line.263"></a>
-<span class="sourceLineNo">264</span>        // For checking the lower bound we need to eliminate the last chunk,<a name="line.264"></a>
-<span class="sourceLineNo">265</span>        // because it is frequently smaller and the false positive rate in it<a name="line.265"></a>
-<span class="sourceLineNo">266</span>        // is too low. This does not help if there is only one under-sized<a name="line.266"></a>
-<span class="sourceLineNo">267</span>        // chunk, though.<a name="line.267"></a>
-<span class="sourceLineNo">268</span>        int nChunks = cbf.getNumChunks();<a name="line.268"></a>
-<span class="sourceLineNo">269</span>        if (nChunks &gt; 1) {<a name="line.269"></a>
-<span class="sourceLineNo">270</span>          numFalsePos -= cbf.getNumPositivesForTesting(nChunks - 1);<a name="line.270"></a>
-<span class="sourceLineNo">271</span>          nTrials -= cbf.getNumQueriesForTesting(nChunks - 1);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>          falsePosRate = numFalsePos * 1.0 / nTrials;<a name="line.272"></a>
-<span class="sourceLineNo">273</span>          LOG.info(testIdMsg + " False positive rate without last chunk is " +<a name="line.273"></a>
-<span class="sourceLineNo">274</span>              falsePosRate + fakeLookupModeStr);<a name="line.274"></a>
-<span class="sourceLineNo">275</span>        }<a name="line.275"></a>
-<span class="sourceLineNo">276</span><a name="line.276"></a>
-<span class="sourceLineNo">277</span>        validateFalsePosRate(falsePosRate, nTrials, -2.58, cbf,<a name="line.277"></a>
-<span class="sourceLineNo">278</span>            fakeLookupModeStr);<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      } finally {<a name="line.279"></a>
-<span class="sourceLineNo">280</span>        BloomFilterUtil.setRandomGeneratorForTest(null);<a name="line.280"></a>
-<span class="sourceLineNo">281</span>      }<a name="line.281"></a>
-<span class="sourceLineNo">282</span>    }<a name="line.282"></a>
-<span class="sourceLineNo">283</span><a name="line.283"></a>
-<span class="sourceLineNo">284</span>    r.close(true); // end of test so evictOnClose<a name="line.284"></a>
-<span class="sourceLineNo">285</span>  }<a name="line.285"></a>
-<span class="sourceLineNo">286</span><a name="line.286"></a>
-<span class="sourceLineNo">287</span>  private boolean isInBloom(StoreFileScanner scanner, byte[] row, BloomType bt,<a name="line.287"></a>
-<span class="sourceLineNo">288</span>      Random rand) {<a name="line.288"></a>
-<span class="sourceLineNo">289</span>    return isInBloom(scanner, row, RandomKeyValueUtil.randomRowOrQualifier(rand));<a name="line.289"></a>
-<span class="sourceLineNo">290</span>  }<a name="line.290"></a>
-<span class="sourceLineNo">291</span><a name="line.291"></a>
-<span class="sourceLineNo">292</span>  private boolean isInBloom(StoreFileScanner scanner, byte[] row,<a name="line.292"></a>
-<span class="sourceLineNo">293</span>      byte[] qualifier) {<a name="line.293"></a>
-<span class="sourceLineNo">294</span>    Scan scan = new Scan().withStartRow(row).withStopRow(row, true);<a name="line.294"></a>
-<span class="sourceLineNo">295</span>    scan.addColumn(Bytes.toBytes(RandomKeyValueUtil.COLUMN_FAMILY_NAME), qualifier);<a name="line.295"></a>
-<span class="sourceLineNo">296</span>    HStore store = mock(HStore.class);<a name="line.296"></a>
-<span class="sourceLineNo">297</span>    when(store.getColumnFamilyDescriptor())<a name="line.297"></a>
-<span class="sourceLineNo">298</span>        .thenReturn(ColumnFamilyDescriptorBuilder.of(RandomKeyValueUtil.COLUMN_FAMILY_NAME));<a name="line.298"></a>
-<span class="sourceLineNo">299</span>    return scanner.shouldUseScanner(scan, store, Long.MIN_VALUE);<a name="line.299"></a>
-<span class="sourceLineNo">300</span>  }<a name="line.300"></a>
-<span class="sourceLineNo">301</span><a name="line.301"></a>
-<span class="sourceLineNo">302</span>  private Path writeStoreFile(int t, BloomType bt, List&lt;KeyValue&gt; kvs)<a name="line.302"></a>
-<span class="sourceLineNo">303</span>      throws IOException {<a name="line.303"></a>
-<span class="sourceLineNo">304</span>    conf.setInt(BloomFilterFactory.IO_STOREFILE_BLOOM_BLOCK_SIZE,<a name="line.304"></a>
-<span class="sourceLineNo">305</span>        BLOOM_BLOCK_SIZES[t]);<a name="line.305"></a>
-<span class="sourceLineNo">306</span>    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, true);<a name="line.306"></a>
-<span class="sourceLineNo">307</span>    cacheConf = new CacheConfig(conf);<a name="line.307"></a>
-<span class="sourceLineNo">308</span>    HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCK_SIZES[t]).build();<a name="line.308"></a>
-<span class="sourceLineNo">309</span>    StoreFileWriter w = new StoreFileWriter.Builder(conf, cacheConf, fs)<a name="line.309"></a>
-<span class="sourceLineNo">310</span>            .withOutputDir(TEST_UTIL.getDataTestDir())<a name="line.310"></a>
-<span class="sourceLineNo">311</span>            .withBloomType(bt)<a name="line.311"></a>
-<span class="sourceLineNo">312</span>            .withFileContext(meta)<a name="line.312"></a>
-<span class="sourceLineNo">313</span>            .build();<a name="line.313"></a>
-<span class="sourceLineNo">314</span><a name="line.314"></a>
-<span class="sourceLineNo">315</span>    assertTrue(w.hasGeneralBloom());<a name="line.315"></a>
-<span class="sourceLineNo">316</span>    assertTrue(w.getGeneralBloomWriter() instanceof CompoundBloomFilterWriter);<a name="line.316"></a>
-<span class="sourceLineNo">317</span>    CompoundBloomFilterWriter cbbf =<a name="line.317"></a>
-<span class="sourceLineNo">318</span>        (CompoundBloomFilterWriter) w.getGeneralBloomWriter();<a name="line.318"></a>
-<span class="sourceLineNo">319</span><a name="line.319"></a>
-<span class="sourceLineNo">320</span>    int keyCount = 0;<a name="line.320"></a>
-<span class="sourceLineNo">321</span>    KeyValue prev = null;<a name="line.321"></a>
-<span class="sourceLineNo">322</span>    LOG.debug("Total keys/values to insert: " + kvs.size());<a name="line.322"></a>
-<span class="sourceLineNo">323</span>    for (KeyValue kv : kvs) {<a name="line.323"></a>
-<span class="sourceLineNo">324</span>      w.append(kv);<a name="line.324"></a>
-<span class="sourceLineNo">325</span><a name="line.325"></a>
-<span class="sourceLineNo">326</span>      // Validate the key count in the Bloom filter.<a name="line.326"></a>
-<span class="sourceLineNo">327</span>      boolean newKey = true;<a name="line.327"></a>
-<span class="sourceLineNo">328</span>      if (prev != null) {<a name="line.328"></a>
-<span class="sourceLineNo">329</span>        newKey = !(bt == BloomType.ROW ? CellUtil.matchingRows(kv,<a name="line.329"></a>
-<span class="sourceLineNo">330</span>            prev) : CellUtil.matchingRowColumn(kv, prev));<a name="line.330"></a>
-<span class="sourceLineNo">331</span>      }<a name="line.331"></a>
-<span class="sourceLineNo">332</span>      if (newKey)<a name="line.332"></a>
-<span class="sourceLineNo">333</span>        ++keyCount;<a name="line.333"></a>
-<span class="sourceLineNo">334</span>      assertEquals(keyCount, cbbf.getKeyCount());<a name="line.334"></a>
-<span class="sourceLineNo">335</span><a name="line.335"></a>
-<span class="sourceLineNo">336</span>      prev = kv;<a name="line.336"></a>
-<span class="sourceLineNo">337</span>    }<a name="line.337"></a>
-<span class="sourceLineNo">338</span>    w.close();<a name="line.338"></a>
-<span class="sourceLineNo">339</span><a name="line.339"></a>
-<span class="sourceLineNo">340</span>    return w.getPath();<a name="line.340"></a>
-<span class="sourceLineNo">341</span>  }<a name="line.341"></a>
-<span class="sourceLineNo">342</span><a name="line.342"></a>
-<span class="sourceLineNo">343</span>  @Test<a name="line.343"></a>
-<span class="sourceLineNo">344</span>  public void testCompoundBloomSizing() {<a name="line.344"></a>
-<span class="sourceLineNo">345</span>    int bloomBlockByteSize = 4096;<a name="line.345"></a>
-<span class="sourceLineNo">346</span>    int bloomBlockBitSize = bloomBlockByteSize * 8;<a name="line.346"></a>
-<span class="sourceLineNo">347</span>    double targetErrorRate = 0.01;<a name="line.347"></a>
-<span class="sourceLineNo">348</span>    long maxKeysPerChunk = BloomFilterUtil.idealMaxKeys(bloomBlockBitSize,<a name="line.348"></a>
-<span class="sourceLineNo">349</span>        targetErrorRate);<a name="line.349"></a>
-<span class="sourceLineNo">350</span><a name="line.350"></a>
-<span class="sourceLineNo">351</span>    long bloomSize1 = bloomBlockByteSize * 8;<a name="line.351"></a>
-<span class="sourceLineNo">352</span>    long bloomSize2 = BloomFilterUtil.computeBitSize(maxKeysPerChunk,<a name="line.352"></a>
-<span class="sourceLineNo">353</span>        targetErrorRate);<a name="line.353"></a>
-<span class="sourceLineNo">354</span><a name="line.354"></a>
-<span class="sourceLineNo">355</span>    double bloomSizeRatio = (bloomSize2 * 1.0 / bloomSize1);<a name="line.355"></a>
-<span class="sourceLineNo">356</span>    assertTrue(Math.abs(bloomSizeRatio - 0.9999) &lt; 0.0001);<a name="line.356"></a>
-<span class="sourceLineNo">357</span>  }<a name="line.357"></a>
-<span class="sourceLineNo">358</span><a name="line.358"></a>
-<span class="sourceLineNo">359</span>  @Test<a name="line.359"></a>
-<span class="sourceLineNo">360</span>  public void testCreateKey() {<a name="line.360"></a>
-<span class="sourceLineNo">361</span>    byte[] row = "myRow".getBytes();<a name="line.361"></a>
-<span class="sourceLineNo">362</span>    byte[] qualifier = "myQualifier".getBytes();<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    // Mimic what Storefile.createBloomKeyValue() does<a name="line.363"></a>
-<span class="sourceLineNo">364</span>    byte[] rowKey = KeyValueUtil.createFirstOnRow(row, 0, row.length, new byte[0], 0, 0, row, 0, 0).getKey();<a name="line.364"></a>
-<span class="sourceLineNo">365</span>    byte[] rowColKey = KeyValueUtil.createFirstOnRow(row, 0, row.length,<a name="line.365"></a>
-<span class="sourceLineNo">366</span>        new byte[0], 0, 0, qualifier, 0, qualifier.length).getKey();<a name="line.366"></a>
-<span class="sourceLineNo">367</span>    KeyValue rowKV = KeyValueUtil.createKeyValueFromKey(rowKey);<a name="line.367"></a>
-<span class="sourceLineNo">368</span>    KeyValue rowColKV = KeyValueUtil.createKeyValueFromKey(rowColKey);<a name="line.368"></a>
-<span class="sourceLineNo">369</span>    assertEquals(rowKV.getTimestamp(), rowColKV.getTimestamp());<a name="line.369"></a>
-<span class="sourceLineNo">370</span>    assertEquals(Bytes.toStringBinary(rowKV.getRowArray(), rowKV.getRowOffset(),<a name="line.370"></a>
-<span class="sourceLineNo">371</span>      rowKV.getRowLength()), Bytes.toStringBinary(rowColKV.getRowArray(), rowColKV.getRowOffset(),<a name="line.371"></a>
-<span class="sourceLineNo">372</span>      rowColKV.getRowLength()));<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    assertEquals(0, rowKV.getQualifierLength());<a name="line.373"></a>
-<span class="sourceLineNo">374</span>  }<a name="line.374"></a>
-<span class="sourceLineNo">375</span><a name="line.375"></a>
+<span class="sourceLineNo">142</span>    CacheConfig.instantiateBlockCache(conf);<a name="line.142"></a>
+<span class="sourceLineNo">143</span>    cacheConf = new CacheConfig(conf);<a name="line.143"></a>
+<span class="sourceLineNo">144</span>    blockCache = cacheConf.getBlockCache();<a name="line.144"></a>
+<span class="sourceLineNo">145</span>    assertNotNull(blockCache);<a name="line.145"></a>
+<span class="sourceLineNo">146</span>  }<a name="line.146"></a>
+<span class="sourceLineNo">147</span><a name="line.147"></a>
+<span class="sourceLineNo">148</span>  private List&lt;KeyValue&gt; createSortedKeyValues(Random rand, int n) {<a name="line.148"></a>
+<span class="sourceLineNo">149</span>    List&lt;KeyValue&gt; kvList = new ArrayList&lt;&gt;(n);<a name="line.149"></a>
+<span class="sourceLineNo">150</span>    for (int i = 0; i &lt; n; ++i)<a name="line.150"></a>
+<span class="sourceLineNo">151</span>      kvList.add(RandomKeyValueUtil.randomKeyValue(rand));<a name="line.151"></a>
+<span class="sourceLineNo">152</span>    Collections.sort(kvList, CellComparatorImpl.COMPARATOR);<a name="line.152"></a>
+<span class="sourceLineNo">153</span>    return kvList;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>  }<a name="line.154"></a>
+<span class="sourceLineNo">155</span><a name="line.155"></a>
+<span class="sourceLineNo">156</span>  @Test<a name="line.156"></a>
+<span class="sourceLineNo">157</span>  public void testCompoundBloomFilter() throws IOException {<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    conf.setBoolean(BloomFilterFactory.IO_STOREFILE_BLOOM_ENABLED, true);<a name="line.158"></a>
+<span class="sourceLineNo">159</span>    for (int t = 0; t &lt; NUM_TESTS; ++t) {<a name="line.159"></a>
+<span class="sourceLineNo">160</span>      conf.setFloat(BloomFilterFactory.IO_STOREFILE_BLOOM_ERROR_RATE,<a name="line.160"></a>
+<span class="sourceLineNo">161</span>          (float) TARGET_ERROR_RATES[t]);<a name="line.161"></a>
+<span class="sourceLineNo">162</span><a name="line.162"></a>
+<span class="sourceLineNo">163</span>      testIdMsg = "in test #" + t + ":";<a name="line.163"></a>
+<span class="sourceLineNo">164</span>      Random generationRand = new Random(GENERATION_SEED);<a name="line.164"></a>
+<span class="sourceLineNo">165</span>      List&lt;KeyValue&gt; kvs = createSortedKeyValues(generationRand, NUM_KV[t]);<a name="line.165"></a>
+<span class="sourceLineNo">166</span>      BloomType bt = BLOOM_TYPES[t];<a name="line.166"></a>
+<span class="sourceLineNo">167</span>      Path sfPath = writeStoreFile(t, bt, kvs);<a name="line.167"></a>
+<span class="sourceLineNo">168</span>      readStoreFile(t, bt, kvs, sfPath);<a name="line.168"></a>
+<span class="sourceLineNo">169</span>    }<a name="line.169"></a>
+<span class="sourceLineNo">170</span>  }<a name="line.170"></a>
+<span class="sourceLineNo">171</span><a name="line.171"></a>
+<span class="sourceLineNo">172</span>  /**<a name="line.172"></a>
+<span class="sourceLineNo">173</span>   * Validates the false positive ratio by computing its z-value and comparing<a name="line.173"></a>
+<span class="sourceLineNo">174</span>   * it to the provided threshold.<a name="line.174"></a>
+<span class="sourceLineNo">175</span>   *<a name="line.175"></a>
+<span class="sourceLineNo">176</span>   * @param falsePosRate experimental positive rate<a name="line.176"></a>
+<span class="sourceLineNo">177</span>   * @param nTrials the number of Bloom filter checks<a name="line.177"></a>
+<span class="sourceLineNo">178</span>   * @param zValueBoundary z-value boundary, positive for an upper bound and<a name="line.178"></a>
+<span class="sourceLineNo">179</span>   *          negative for a lower bound<a name="line.179"></a>
+<span class="sourceLineNo">180</span>   * @param cbf the compound Bloom filter we are using<a name="line.180"></a>
+<span class="sourceLineNo">181</span>   * @param additionalMsg additional message to include in log output and<a name="line.181"></a>
+<span class="sourceLineNo">182</span>   *          assertion failures<a name="line.182"></a>
+<span class="sourceLineNo">183</span>   */<a name="line.183"></a>
+<span class="sourceLineNo">184</span>  private void validateFalsePosRate(double falsePosRate, int nTrials,<a name="line.184"></a>
+<span class="sourceLineNo">185</span>      double zValueBoundary, CompoundBloomFilter cbf, String additionalMsg) {<a name="line.185"></a>
+<span class="sourceLineNo">186</span>    double p = BloomFilterFactory.getErrorRate(conf);<a name="line.186"></a>
+<span class="sourceLineNo">187</span>    double zValue = (falsePosRate - p) / Math.sqrt(p * (1 - p) / nTrials);<a name="line.187"></a>
+<span class="sourceLineNo">188</span><a name="line.188"></a>
+<span class="sourceLineNo">189</span>    String assortedStatsStr = " (targetErrorRate=" + p + ", falsePosRate="<a name="line.189"></a>
+<span class="sourceLineNo">190</span>        + falsePosRate + ", nTrials=" + nTrials + ")";<a name="line.190"></a>
+<span class="sourceLineNo">191</span>    LOG.info("z-value is " + zValue + assortedStatsStr);<a name="line.191"></a>
+<span class="sourceLineNo">192</span><a name="line.192"></a>
+<span class="sourceLineNo">193</span>    boolean isUpperBound = zValueBoundary &gt; 0;<a name="line.193"></a>
+<span class="sourceLineNo">194</span><a name="line.194"></a>
+<span class="sourceLineNo">195</span>    if (isUpperBound &amp;&amp; zValue &gt; zValueBoundary ||<a name="line.195"></a>
+<span class="sourceLineNo">196</span>        !isUpperBound &amp;&amp; zValue &lt; zValueBoundary) {<a name="line.196"></a>
+<span class="sourceLineNo">197</span>      String errorMsg = "False positive rate z-value " + zValue + " is "<a name="line.197"></a>
+<span class="sourceLineNo">198</span>          + (isUpperBound ? "higher" : "lower") + " than " + zValueBoundary<a name="line.198"></a>
+<span class="sourceLineNo">199</span>          + assortedStatsStr + ". Per-chunk stats:\n"<a name="line.199"></a>
+<span class="sourceLineNo">200</span>          + cbf.formatTestingStats();<a name="line.200"></a>
+<span class="sourceLineNo">201</span>      fail(errorMsg + additionalMsg);<a name="line.201"></a>
+<span class="sourceLineNo">202</span>    }<a name="line.202"></a>
+<span class="sourceLineNo">203</span>  }<a name="line.203"></a>
+<span class="sourceLineNo">204</span><a name="line.204"></a>
+<span class="sourceLineNo">205</span>  private void readStoreFile(int t, BloomType bt, List&lt;KeyValue&gt; kvs,<a name="line.205"></a>
+<span class="sourceLineNo">206</span>      Path sfPath) throws IOException {<a name="line.206"></a>
+<span class="sourceLineNo">207</span>    HStoreFile sf = new HStoreFile(fs, sfPath, conf, cacheConf, bt, true);<a name="line.207"></a>
+<span class="sourceLineNo">208</span>    sf.initReader();<a name="line.208"></a>
+<span class="sourceLineNo">209</span>    StoreFileReader r = sf.getReader();<a name="line.209"></a>
+<span class="sourceLineNo">210</span>    final boolean pread = true; // does not really matter<a name="line.210"></a>
+<span class="sourceLineNo">211</span>    StoreFileScanner scanner = r.getStoreFileScanner(true, pread, false, 0, 0, false);<a name="line.211"></a>
+<span class="sourceLineNo">212</span><a name="line.212"></a>
+<span class="sourceLineNo">213</span>    {<a name="line.213"></a>
+<span class="sourceLineNo">214</span>      // Test for false negatives (not allowed).<a name="line.214"></a>
+<span class="sourceLineNo">215</span>      int numChecked = 0;<a name="line.215"></a>
+<span class="sourceLineNo">216</span>      for (KeyValue kv : kvs) {<a name="line.216"></a>
+<span class="sourceLineNo">217</span>        byte[] row = CellUtil.cloneRow(kv);<a name="line.217"></a>
+<span class="sourceLineNo">218</span>        boolean present = isInBloom(scanner, row, CellUtil.cloneQualifier(kv));<a name="line.218"></a>
+<span class="sourceLineNo">219</span>        assertTrue(testIdMsg + " Bloom filter false negative on row "<a name="line.219"></a>
+<span class="sourceLineNo">220</span>            + Bytes.toStringBinary(row) + " after " + numChecked<a name="line.220"></a>
+<span class="sourceLineNo">221</span>            + " successful checks", present);<a name="line.221"></a>
+<span class="sourceLineNo">222</span>        ++numChecked;<a name="line.222"></a>
+<span class="sourceLineNo">223</span>      }<a name="line.223"></a>
+<span class="sourceLineNo">224</span>    }<a name="line.224"></a>
+<span class="sourceLineNo">225</span><a name="line.225"></a>
+<span class="sourceLineNo">226</span>    // Test for false positives (some percentage allowed). We test in two modes:<a name="line.226"></a>
+<span class="sourceLineNo">227</span>    // "fake lookup" which ignores the key distribution, and production mode.<a name="line.227"></a>
+<span class="sourceLineNo">228</span>    for (boolean fakeLookupEnabled : new boolean[] { true, false }) {<a name="line.228"></a>
+<span class="sourceLineNo">229</span>      if (fakeLookupEnabled) {<a name="line.229"></a>
+<span class="sourceLineNo">230</span>        BloomFilterUtil.setRandomGeneratorForTest(new Random(283742987L));<a name="line.230"></a>
+<span class="sourceLineNo">231</span>      }<a name="line.231"></a>
+<span class="sourceLineNo">232</span>      try {<a name="line.232"></a>
+<span class="sourceLineNo">233</span>        String fakeLookupModeStr = ", fake lookup is " + (fakeLookupEnabled ?<a name="line.233"></a>
+<span class="sourceLineNo">234</span>            "enabled" : "disabled");<a name="line.234"></a>
+<span class="sourceLineNo">235</span>        CompoundBloomFilter cbf = (CompoundBloomFilter) r.getGeneralBloomFilter();<a name="line.235"></a>
+<span class="sourceLineNo">236</span>        cbf.enableTestingStats();<a name="line.236"></a>
+<span class="sourceLineNo">237</span>        int numFalsePos = 0;<a name="line.237"></a>
+<span class="sourceLineNo">238</span>        Random rand = new Random(EVALUATION_SEED);<a name="line.238"></a>
+<span class="sourceLineNo">239</span>        int nTrials = NUM_KV[t] * 10;<a name="line.239"></a>
+<span class="sourceLineNo">240</span>        for (int i = 0; i &lt; nTrials; ++i) {<a name="line.240"></a>
+<span class="sourceLineNo">241</span>          byte[] query = RandomKeyValueUtil.randomRowOrQualifier(rand);<a name="line.241"></a>
+<span class="sourceLineNo">242</span>          if (isInBloom(scanner, query, bt, rand)) {<a name="line.242"></a>
+<span class="sourceLineNo">243</span>            numFalsePos += 1;<a name="line.243"></a>
+<span class="sourceLineNo">244</span>          }<a name="line.244"></a>
+<span class="sourceLineNo">245</span>        }<a name="line.245"></a>
+<span class="sourceLineNo">246</span>        double falsePosRate = numFalsePos * 1.0 / nTrials;<a name="line.246"></a>
+<span class="sourceLineNo">247</span>        LOG.debug(String.format(testIdMsg<a name="line.247"></a>
+<span class="sourceLineNo">248</span>            + " False positives: %d out of %d (%f)",<a name="line.248"></a>
+<span class="sourceLineNo">249</span>            numFalsePos, nTrials, falsePosRate) + fakeLookupModeStr);<a name="line.249"></a>
+<span class="sourceLineNo">250</span><a name="line.250"></a>
+<span class="sourceLineNo">251</span>        // Check for obvious Bloom filter crashes.<a name="line.251"></a>
+<span class="sourceLineNo">252</span>        assertTrue("False positive is too high: " + falsePosRate + " (greater "<a name="line.252"></a>
+<span class="sourceLineNo">253</span>            + "than " + TOO_HIGH_ERROR_RATE + ")" + fakeLookupModeStr,<a name="line.253"></a>
+<span class="sourceLineNo">254</span>            falsePosRate &lt; TOO_HIGH_ERROR_RATE);<a name="line.254"></a>
+<span class="sourceLineNo">255</span><a name="line.255"></a>
+<span class="sourceLineNo">256</span>        // Now a more precise check to see if the false positive rate is not<a name="line.256"></a>
+<span class="sourceLineNo">257</span>        // too high. The reason we use a relaxed restriction for the real-world<a name="line.257"></a>
+<span class="sourceLineNo">258</span>        // case as opposed to the "fake lookup" case is that our hash functions<a name="line.258"></a>
+<span class="sourceLineNo">259</span>        // are not completely independent.<a name="line.259"></a>
+<span class="sourceLineNo">260</span><a name="line.260"></a>
+<span class="sourceLineNo">261</span>        double maxZValue = fakeLookupEnabled ? 1.96 : 2.5;<a name="line.261"></a>
+<span class="sourceLineNo">262</span>        validateFalsePosRate(falsePosRate, nTrials, maxZValue, cbf,<a name="line.262"></a>
+<span class="sourceLineNo">263</span>            fakeLookupModeStr);<a name="line.263"></a>
+<span class="sourceLineNo">264</span><a name="line.264"></a>
+<span class="sourceLineNo">265</span>        // For checking the lower bound we need to eliminate the last chunk,<a name="line.265"></a>
+<span class="sourceLineNo">266</span>        // because it is frequently smaller and the false positive rate in it<a name="line.266"></a>
+<span class="sourceLineNo">267</span>        // is too low. This does not help if there is only one under-sized<a name="line.267"></a>
+<span class="sourceLineNo">268</span>        // chunk, though.<a name="line.268"></a>
+<span class="sourceLineNo">269</span>        int nChunks = cbf.getNumChunks();<a name="line.269"></a>
+<span class="sourceLineNo">270</span>        if (nChunks &gt; 1) {<a name="line.270"></a>
+<span class="sourceLineNo">271</span>          numFalsePos -= cbf.getNumPositivesForTesting(nChunks - 1);<a name="line.271"></a>
+<span class="sourceLineNo">272</span>          nTrials -= cbf.getNumQueriesForTesting(nChunks - 1);<a name="line.272"></a>
+<span class="sourceLineNo">273</span>          falsePosRate = numFalsePos * 1.0 / nTrials;<a name="line.273"></a>
+<span class="sourceLineNo">274</span>          LOG.info(testIdMsg + " False positive rate without last chunk is " +<a name="line.274"></a>
+<span class="sourceLineNo">275</span>              falsePosRate + fakeLookupModeStr);<a name="line.275"></a>
+<span class="sourceLineNo">276</span>        }<a name="line.276"></a>
+<span class="sourceLineNo">277</span><a name="line.277"></a>
+<span class="sourceLineNo">278</span>        validateFalsePosRate(falsePosRate, nTrials, -2.58, cbf,<a name="line.278"></a>
+<span class="sourceLineNo">279</span>            fakeLookupModeStr);<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      } finally {<a name="line.280"></a>
+<span class="sourceLineNo">281</span>        BloomFilterUtil.setRandomGeneratorForTest(null);<a name="line.281"></a>
+<span class="sourceLineNo">282</span>      }<a name="line.282"></a>
+<span class="sourceLineNo">283</span>    }<a name="line.283"></a>
+<span class="sourceLineNo">284</span><a name="line.284"></a>
+<span class="sourceLineNo">285</span>    r.close(true); // end of test so evictOnClose<a name="line.285"></a>
+<span class="sourceLineNo">286</span>  }<a name="line.286"></a>
+<span class="sourceLineNo">287</span><a name="line.287"></a>
+<span class="sourceLineNo">288</span>  private boolean isInBloom(StoreFileScanner scanner, byte[] row, BloomType bt,<a name="line.288"></a>
+<span class="sourceLineNo">289</span>      Random rand) {<a name="line.289"></a>
+<span class="sourceLineNo">290</span>    return isInBloom(scanner, row, RandomKeyValueUtil.randomRowOrQualifier(rand));<a name="line.290"></a>
+<span class="sourceLineNo">291</span>  }<a name="line.291"></a>
+<span class="sourceLineNo">292</span><a name="line.292"></a>
+<span class="sourceLineNo">293</span>  private boolean isInBloom(StoreFileScanner scanner, byte[] row,<a name="line.293"></a>
+<span class="sourceLineNo">294</span>      byte[] qualifier) {<a name="line.294"></a>
+<span class="sourceLineNo">295</span>    Scan scan = new Scan().withStartRow(row).withStopRow(row, true);<a name="line.295"></a>
+<span class="sourceLineNo">296</span>    scan.addColumn(Bytes.toBytes(RandomKeyValueUtil.COLUMN_FAMILY_NAME), qualifier);<a name="line.296"></a>
+<span class="sourceLineNo">297</span>    HStore store = mock(HStore.class);<a name="line.297"></a>
+<span class="sourceLineNo">298</span>    when(store.getColumnFamilyDescriptor())<a name="line.298"></a>
+<span class="sourceLineNo">299</span>        .thenReturn(ColumnFamilyDescriptorBuilder.of(RandomKeyValueUtil.COLUMN_FAMILY_NAME));<a name="line.299"></a>
+<span class="sourceLineNo">300</span>    return scanner.shouldUseScanner(scan, store, Long.MIN_VALUE);<a name="line.300"></a>
+<span class="sourceLineNo">301</span>  }<a name="line.301"></a>
+<span class="sourceLineNo">302</span><a name="line.302"></a>
+<span class="sourceLineNo">303</span>  private Path writeStoreFile(int t, BloomType bt, List&lt;KeyValue&gt; kvs)<a name="line.303"></a>
+<span class="sourceLineNo">304</span>      throws IOException {<a name="line.304"></a>
+<span class="sourceLineNo">305</span>    conf.setInt(BloomFilterFactory.IO_STOREFILE_BLOOM_BLOCK_SIZE,<a name="line.305"></a>
+<span class="sourceLineNo">306</span>        BLOOM_BLOCK_SIZES[t]);<a name="line.306"></a>
+<span class="sourceLineNo">307</span>    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, true);<a name="line.307"></a>
+<span class="sourceLineNo">308</span>    cacheConf = new CacheConfig(conf);<a name="line.308"></a>
+<span class="sourceLineNo">309</span>    HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCK_SIZES[t]).build();<a name="line.309"></a>
+<span class="sourceLineNo">310</span>    StoreFileWriter w = new StoreFileWriter.Builder(conf, cacheConf, fs)<a name="line.310"></a>
+<span class="sourceLineNo">311</span>            .withOutputDir(TEST_UTIL.getDataTestDir())<a name="line.311"></a>
+<span class="sourceLineNo">312</span>            .withBloomType(bt)<a name="line.312"></a>
+<span class="sourceLineNo">313</span>            .withFileContext(meta)<a name="line.313"></a>
+<span class="sourceLineNo">314</span>            .build();<a name="line.314"></a>
+<span class="sourceLineNo">315</span><a name="line.315"></a>
+<span class="sourceLineNo">316</span>    assertTrue(w.hasGeneralBloom());<a name="line.316"></a>
+<span class="sourceLineNo">317</span>    assertTrue(w.getGeneralBloomWriter() instanceof CompoundBloomFilterWriter);<a name="line.317"></a>
+<span class="sourceLineNo">318</span>    CompoundBloomFilterWriter cbbf =<a name="line.318"></a>
+<span class="sourceLineNo">319</span>        (CompoundBloomFilterWriter) w.getGeneralBloomWriter();<a name="line.319"></a>
+<span class="sourceLineNo">320</span><a name="line.320"></a>
+<span class="sourceLineNo">321</span>    int keyCount = 0;<a name="line.321"></a>
+<span class="sourceLineNo">322</span>    KeyValue prev = null;<a name="line.322"></a>
+<span class="sourceLineNo">323</span>    LOG.debug("Total keys/values to insert: " + kvs.size());<a name="line.323"></a>
+<span class="sourceLineNo">324</span>    for (KeyValue kv : kvs) {<a name="line.324"></a>
+<span class="sourceLineNo">325</span>      w.append(kv);<a name="line.325"></a>
+<span class="sourceLineNo">326</span><a name="line.326"></a>
+<span class="sourceLineNo">327</span>      // Validate the key count in the Bloom filter.<a name="line.327"></a>
+<span class="sourceLineNo">328</span>      boolean newKey = true;<a name="line.328"></a>
+<span class="sourceLineNo">329</span>      if (prev != null) {<a name="line.329"></a>
+<span class="sourceLineNo">330</span>        newKey = !(bt == BloomType.ROW ? CellUtil.matchingRows(kv,<a name="line.330"></a>
+<span class="sourceLineNo">331</span>            prev) : CellUtil.matchingRowColumn(kv, prev));<a name="line.331"></a>
+<span class="sourceLineNo">332</span>      }<a name="line.332"></a>
+<span class="sourceLineNo">333</span>      if (newKey)<a name="line.333"></a>
+<span class="sourceLineNo">334</span>        ++keyCount;<a name="line.334"></a>
+<span class="sourceLineNo">335</span>      assertEquals(keyCount, cbbf.getKeyCount());<a name="line.335"></a>
+<span class="sourceLineNo">336</span><a name="line.336"></a>
+<span class="sourceLineNo">337</span>      prev = kv;<a name="line.337"></a>
+<span class="sourceLineNo">338</span>    }<a name="line.338"></a>
+<span class="sourceLineNo">339</span>    w.close();<a name="line.339"></a>
+<span class="sourceLineNo">340</span><a name="line.340"></a>
+<span class="sourceLineNo">341</span>    return w.getPath();<a name="line.341"></a>
+<span class="sourceLineNo">342</span>  }<a name="line.342"></a>
+<span class="sourceLineNo">343</span><a name="line.343"></a>
+<span class="sourceLineNo">344</span>  @Test<a name="line.344"></a>
+<span class="sourceLineNo">345</span>  public void testCompoundBloomSizing() {<a name="line.345"></a>
+<span class="sourceLineNo">346</span>    int bloomBlockByteSize = 4096;<a name="line.346"></a>
+<span class="sourceLineNo">347</span>    int bloomBlockBitSize = bloomBlockByteSize * 8;<a name="line.347"></a>
+<span class="sourceLineNo">348</span>    double targetErrorRate = 0.01;<a name="line.348"></a>
+<span class="sourceLineNo">349</span>    long maxKeysPerChunk = BloomFilterUtil.idealMaxKeys(bloomBlockBitSize,<a name="line.349"></a>
+<span class="sourceLineNo">350</span>        targetErrorRate);<a name="line.350"></a>
+<span class="sourceLineNo">351</span><a name="line.351"></a>
+<span class="sourceLineNo">352</span>    long bloomSize1 = bloomBlockByteSize * 8;<a name="line.352"></a>
+<span class="sourceLineNo">353</span>    long bloomSize2 = BloomFilterUtil.computeBitSize(maxKeysPerChunk,<a name="line.353"></a>
+<span class="sourceLineNo">354</span>        targetErrorRate);<a name="line.354"></a>
+<span class="sourceLineNo">355</span><a name="line.355"></a>
+<span class="sourceLineNo">356</span>    double bloomSizeRatio = (bloomSize2 * 1.0 / bloomSize1);<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    assertTrue(Math.abs(bloomSizeRatio - 0.9999) &lt; 0.0001);<a name="line.357"></a>
+<span class="sourceLineNo">358</span>  }<a name="line.358"></a>
+<span class="sourceLineNo">359</span><a name="line.359"></a>
+<span class="sourceLineNo">360</span>  @Test<a name="line.360"></a>
+<span class="sourceLineNo">361</span>  public void testCreateKey() {<a name="line.361"></a>
+<span class="sourceLineNo">362</span>    byte[] row = "myRow".getBytes();<a name="line.362"></a>
+<span class="sourceLineNo">363</span>    byte[] qualifier = "myQualifier".getBytes();<a name="line.363"></a>
+<span class="sourceLineNo">364</span>    // Mimic what Storefile.createBloomKeyValue() does<a name="line.364"></a>
+<span class="sourceLineNo">365</span>    byte[] rowKey = KeyValueUtil.createFirstOnRow(row, 0, row.length, new byte[0], 0, 0, row, 0, 0).getKey();<a name="line.365"></a>
+<span class="sourceLineNo">366</span>    byte[] rowColKey = KeyValueUtil.createFirstOnRow(row, 0, row.length,<a name="line.366"></a>
+<span class="sourceLineNo">367</span>        new byte[0], 0, 0, qualifier, 0, qualifier.length).getKey();<a name="line.367"></a>
+<span class="sourceLineNo">368</span>    KeyValue rowKV = KeyValueUtil.createKeyValueFromKey(rowKey);<a name="line.368"></a>
+<span class="sourceLineNo">369</span>    KeyValue rowColKV = KeyValueUtil.createKeyValueFromKey(rowColKey);<a name="line.369"></a>
+<span class="sourceLineNo">370</span>    assertEquals(rowKV.getTimestamp(), rowColKV.getTimestamp());<a name="line.370"></a>
+<span class="sourceLineNo">371</span>    assertEquals(Bytes.toStringBinary(rowKV.getRowArray(), rowKV.getRowOffset(),<a name="line.371"></a>
+<span class="sourceLineNo">372</span>      rowKV.getRowLength()), Bytes.toStringBinary(rowColKV.getRowArray(), rowColKV.getRowOffset(),<a name="line.372"></a>
+<span class="sourceLineNo">373</span>      rowColKV.getRowLength()));<a name="line.373"></a>
+<span class="sourceLineNo">374</span>    assertEquals(0, rowKV.getQualifierLength());<a name="line.374"></a>
+<span class="sourceLineNo">375</span>  }<a name="line.375"></a>
 <span class="sourceLineNo">376</span><a name="line.376"></a>
-<span class="sourceLineNo">377</span>}<a name="line.377"></a>
-<span class="sourceLineNo">378</span><a name="line.378"></a>
+<span class="sourceLineNo">377</span><a name="line.377"></a>
+<span class="sourceLineNo">378</span>}<a name="line.378"></a>
+<span class="sourceLineNo">379</span><a name="line.379"></a>
 
 
 

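For context on the validateFalsePosRate() check visible in the diff above: the test compares the observed false positive rate against the configured target error rate p by computing a z-value, z = (falsePosRate - p) / sqrt(p * (1 - p) / nTrials), and fails when a positive boundary (an upper bound) is exceeded or a negative boundary (a lower bound) is undershot. Below is a minimal standalone sketch of that arithmetic; the class name and the sample numbers are illustrative only and are not taken from the HBase test.

public final class ZValueSketch {
  // Same expression as TestCompoundBloomFilter.validateFalsePosRate(): the deviation of the
  // observed rate from the target rate p, in units of the standard error of a binomial
  // proportion over nTrials Bloom filter checks.
  static double zValue(double falsePosRate, double p, int nTrials) {
    return (falsePosRate - p) / Math.sqrt(p * (1 - p) / nTrials);
  }

  public static void main(String[] args) {
    double p = 0.01;          // hypothetical target error rate
    int nTrials = 10000;      // hypothetical number of Bloom filter checks
    double observed = 0.012;  // hypothetical observed false positive rate
    double z = zValue(observed, p, nTrials);
    // The test uses 1.96 / 2.5 as upper bounds and -2.58 as a lower bound.
    System.out.printf("z = %.3f, exceeds 1.96 upper bound: %b%n", z, z > 1.96);
  }
}
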
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStoreFile.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStoreFile.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStoreFile.html
index a0a5cea7..a08c89e 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStoreFile.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestHStoreFile.html
@@ -932,17 +932,17 @@
 <span class="sourceLineNo">924</span>    scan.setTimeRange(27, 50);<a name="line.924"></a>
 <span class="sourceLineNo">925</span>    scan.setColumnFamilyTimeRange(family, 7, 50);<a name="line.925"></a>
 <span class="sourceLineNo">926</span>    assertTrue(scanner.shouldUseScanner(scan, store, Long.MIN_VALUE));<a name="line.926"></a>
-<span class="sourceLineNo">927</span><a name="line.927"></a>
-<span class="sourceLineNo">928</span>  }<a name="line.928"></a>
-<span class="sourceLineNo">929</span><a name="line.929"></a>
-<span class="sourceLineNo">930</span>  @Test<a name="line.930"></a>
-<span class="sourceLineNo">931</span>  public void testCacheOnWriteEvictOnClose() throws Exception {<a name="line.931"></a>
-<span class="sourceLineNo">932</span>    Configuration conf = this.conf;<a name="line.932"></a>
-<span class="sourceLineNo">933</span><a name="line.933"></a>
-<span class="sourceLineNo">934</span>    // Find a home for our files (regiondir ("7e0102") and familyname).<a name="line.934"></a>
-<span class="sourceLineNo">935</span>    Path baseDir = new Path(new Path(testDir, "7e0102"),"twoCOWEOC");<a name="line.935"></a>
-<span class="sourceLineNo">936</span><a name="line.936"></a>
-<span class="sourceLineNo">937</span>    // Grab the block cache and get the initial hit/miss counts<a name="line.937"></a>
+<span class="sourceLineNo">927</span>  }<a name="line.927"></a>
+<span class="sourceLineNo">928</span><a name="line.928"></a>
+<span class="sourceLineNo">929</span>  @Test<a name="line.929"></a>
+<span class="sourceLineNo">930</span>  public void testCacheOnWriteEvictOnClose() throws Exception {<a name="line.930"></a>
+<span class="sourceLineNo">931</span>    Configuration conf = this.conf;<a name="line.931"></a>
+<span class="sourceLineNo">932</span><a name="line.932"></a>
+<span class="sourceLineNo">933</span>    // Find a home for our files (regiondir ("7e0102") and familyname).<a name="line.933"></a>
+<span class="sourceLineNo">934</span>    Path baseDir = new Path(new Path(testDir, "7e0102"),"twoCOWEOC");<a name="line.934"></a>
+<span class="sourceLineNo">935</span><a name="line.935"></a>
+<span class="sourceLineNo">936</span>    // Grab the block cache and get the initial hit/miss counts<a name="line.936"></a>
+<span class="sourceLineNo">937</span>    CacheConfig.instantiateBlockCache(conf);<a name="line.937"></a>
 <span class="sourceLineNo">938</span>    BlockCache bc = new CacheConfig(conf).getBlockCache();<a name="line.938"></a>
 <span class="sourceLineNo">939</span>    assertNotNull(bc);<a name="line.939"></a>
 <span class="sourceLineNo">940</span>    CacheStats cs = bc.getStats();<a name="line.940"></a>

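The substantive change in both diffs above is the added CacheConfig.instantiateBlockCache(conf) call before a CacheConfig is constructed, so that getBlockCache() returns a non-null cache in the tests. A minimal sketch of that pattern, assuming the HBase 2.x API as it appears in the diff (HBaseConfiguration, CacheConfig, BlockCache) and an illustrative class name, looks like this:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.io.hfile.BlockCache;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;

public class BlockCacheSetupSketch {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // As in the patched tests: create the global block cache for this configuration
    // before building a CacheConfig, otherwise getBlockCache() may return null.
    CacheConfig.instantiateBlockCache(conf);
    CacheConfig cacheConf = new CacheConfig(conf);
    BlockCache blockCache = cacheConf.getBlockCache();
    System.out.println("Block cache instantiated: " + (blockCache != null));
  }
}
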
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.html
index 89cf09c..25308a1 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.html
@@ -48,175 +48,177 @@
 <span class="sourceLineNo">040</span>import org.apache.hadoop.hbase.TableName;<a name="line.40"></a>
 <span class="sourceLineNo">041</span>import org.apache.hadoop.hbase.client.Get;<a name="line.41"></a>
 <span class="sourceLineNo">042</span>import org.apache.hadoop.hbase.client.Result;<a name="line.42"></a>
-<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.testclassification.MediumTests;<a name="line.43"></a>
-<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.44"></a>
-<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.45"></a>
-<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.47"></a>
-<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.48"></a>
-<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.49"></a>
-<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.50"></a>
-<span class="sourceLineNo">051</span>import org.junit.ClassRule;<a name="line.51"></a>
-<span class="sourceLineNo">052</span>import org.junit.Rule;<a name="line.52"></a>
-<span class="sourceLineNo">053</span>import org.junit.Test;<a name="line.53"></a>
-<span class="sourceLineNo">054</span>import org.junit.experimental.categories.Category;<a name="line.54"></a>
-<span class="sourceLineNo">055</span>import org.junit.rules.TestName;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>import org.slf4j.Logger;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>import org.slf4j.LoggerFactory;<a name="line.57"></a>
-<span class="sourceLineNo">058</span><a name="line.58"></a>
-<span class="sourceLineNo">059</span>/**<a name="line.59"></a>
-<span class="sourceLineNo">060</span> * Tests around replay of recovered.edits content.<a name="line.60"></a>
-<span class="sourceLineNo">061</span> */<a name="line.61"></a>
-<span class="sourceLineNo">062</span>@Category({MediumTests.class})<a name="line.62"></a>
-<span class="sourceLineNo">063</span>public class TestRecoveredEdits {<a name="line.63"></a>
-<span class="sourceLineNo">064</span><a name="line.64"></a>
-<span class="sourceLineNo">065</span>  @ClassRule<a name="line.65"></a>
-<span class="sourceLineNo">066</span>  public static final HBaseClassTestRule CLASS_RULE =<a name="line.66"></a>
-<span class="sourceLineNo">067</span>      HBaseClassTestRule.forClass(TestRecoveredEdits.class);<a name="line.67"></a>
-<span class="sourceLineNo">068</span><a name="line.68"></a>
-<span class="sourceLineNo">069</span>  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();<a name="line.69"></a>
-<span class="sourceLineNo">070</span>  private static final Logger LOG = LoggerFactory.getLogger(TestRecoveredEdits.class);<a name="line.70"></a>
-<span class="sourceLineNo">071</span>  @Rule public TestName testName = new TestName();<a name="line.71"></a>
-<span class="sourceLineNo">072</span><a name="line.72"></a>
-<span class="sourceLineNo">073</span>  /**<a name="line.73"></a>
-<span class="sourceLineNo">074</span>   * HBASE-12782 ITBLL fails for me if generator does anything but 5M per maptask.<a name="line.74"></a>
-<span class="sourceLineNo">075</span>   * Create a region. Close it. Then copy into place a file to replay, one that is bigger than<a name="line.75"></a>
-<span class="sourceLineNo">076</span>   * configured flush size so we bring on lots of flushes.  Then reopen and confirm all edits<a name="line.76"></a>
-<span class="sourceLineNo">077</span>   * made it in.<a name="line.77"></a>
-<span class="sourceLineNo">078</span>   * @throws IOException<a name="line.78"></a>
-<span class="sourceLineNo">079</span>   */<a name="line.79"></a>
-<span class="sourceLineNo">080</span>  @Test<a name="line.80"></a>
-<span class="sourceLineNo">081</span>  public void testReplayWorksThoughLotsOfFlushing() throws<a name="line.81"></a>
-<span class="sourceLineNo">082</span>      IOException {<a name="line.82"></a>
-<span class="sourceLineNo">083</span>    for(MemoryCompactionPolicy policy : MemoryCompactionPolicy.values()) {<a name="line.83"></a>
-<span class="sourceLineNo">084</span>      testReplayWorksWithMemoryCompactionPolicy(policy);<a name="line.84"></a>
-<span class="sourceLineNo">085</span>    }<a name="line.85"></a>
-<span class="sourceLineNo">086</span>  }<a name="line.86"></a>
-<span class="sourceLineNo">087</span><a name="line.87"></a>
-<span class="sourceLineNo">088</span>  private void testReplayWorksWithMemoryCompactionPolicy(MemoryCompactionPolicy policy) throws<a name="line.88"></a>
-<span class="sourceLineNo">089</span>    IOException {<a name="line.89"></a>
-<span class="sourceLineNo">090</span>    Configuration conf = new Configuration(TEST_UTIL.getConfiguration());<a name="line.90"></a>
-<span class="sourceLineNo">091</span>    // Set it so we flush every 1M or so.  Thats a lot.<a name="line.91"></a>
-<span class="sourceLineNo">092</span>    conf.setInt(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, 1024*1024);<a name="line.92"></a>
-<span class="sourceLineNo">093</span>    conf.set(CompactingMemStore.COMPACTING_MEMSTORE_TYPE_KEY, String.valueOf(policy).toLowerCase());<a name="line.93"></a>
-<span class="sourceLineNo">094</span>    // The file of recovered edits has a column family of 'meta'. Also has an encoded regionname<a name="line.94"></a>
-<span class="sourceLineNo">095</span>    // of 4823016d8fca70b25503ee07f4c6d79f which needs to match on replay.<a name="line.95"></a>
-<span class="sourceLineNo">096</span>    final String encodedRegionName = "4823016d8fca70b25503ee07f4c6d79f";<a name="line.96"></a>
-<span class="sourceLineNo">097</span>    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(testName.getMethodName()));<a name="line.97"></a>
-<span class="sourceLineNo">098</span>    final String columnFamily = "meta";<a name="line.98"></a>
-<span class="sourceLineNo">099</span>    byte [][] columnFamilyAsByteArray = new byte [][] {Bytes.toBytes(columnFamily)};<a name="line.99"></a>
-<span class="sourceLineNo">100</span>    htd.addFamily(new HColumnDescriptor(columnFamily));<a name="line.100"></a>
-<span class="sourceLineNo">101</span>    HRegionInfo hri = new HRegionInfo(htd.getTableName()) {<a name="line.101"></a>
-<span class="sourceLineNo">102</span>      @Override<a name="line.102"></a>
-<span class="sourceLineNo">103</span>      public synchronized String getEncodedName() {<a name="line.103"></a>
-<span class="sourceLineNo">104</span>        return encodedRegionName;<a name="line.104"></a>
-<span class="sourceLineNo">105</span>      }<a name="line.105"></a>
-<span class="sourceLineNo">106</span><a name="line.106"></a>
-<span class="sourceLineNo">107</span>      // Cache the name because lots of lookups.<a name="line.107"></a>
-<span class="sourceLineNo">108</span>      private byte [] encodedRegionNameAsBytes = null;<a name="line.108"></a>
-<span class="sourceLineNo">109</span>      @Override<a name="line.109"></a>
-<span class="sourceLineNo">110</span>      public synchronized byte[] getEncodedNameAsBytes() {<a name="line.110"></a>
-<span class="sourceLineNo">111</span>        if (encodedRegionNameAsBytes == null) {<a name="line.111"></a>
-<span class="sourceLineNo">112</span>          this.encodedRegionNameAsBytes = Bytes.toBytes(getEncodedName());<a name="line.112"></a>
-<span class="sourceLineNo">113</span>        }<a name="line.113"></a>
-<span class="sourceLineNo">114</span>        return this.encodedRegionNameAsBytes;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>      }<a name="line.115"></a>
-<span class="sourceLineNo">116</span>    };<a name="line.116"></a>
-<span class="sourceLineNo">117</span>    Path hbaseRootDir = TEST_UTIL.getDataTestDir();<a name="line.117"></a>
-<span class="sourceLineNo">118</span>    ChunkCreator.initialize(MemStoreLABImpl.CHUNK_SIZE_DEFAULT, false, 0, 0, 0, null);<a name="line.118"></a>
-<span class="sourceLineNo">119</span>    FileSystem fs = FileSystem.get(TEST_UTIL.getConfiguration());<a name="line.119"></a>
-<span class="sourceLineNo">120</span>    Path tableDir = FSUtils.getTableDir(hbaseRootDir, htd.getTableName());<a name="line.120"></a>
-<span class="sourceLineNo">121</span>    HRegionFileSystem hrfs =<a name="line.121"></a>
-<span class="sourceLineNo">122</span>        new HRegionFileSystem(TEST_UTIL.getConfiguration(), fs, tableDir, hri);<a name="line.122"></a>
-<span class="sourceLineNo">123</span>    if (fs.exists(hrfs.getRegionDir())) {<a name="line.123"></a>
-<span class="sourceLineNo">124</span>      LOG.info("Region directory already exists. Deleting.");<a name="line.124"></a>
-<span class="sourceLineNo">125</span>      fs.delete(hrfs.getRegionDir(), true);<a name="line.125"></a>
-<span class="sourceLineNo">126</span>    }<a name="line.126"></a>
-<span class="sourceLineNo">127</span>    HRegion region = HRegion.createHRegion(hri, hbaseRootDir, conf, htd, null);<a name="line.127"></a>
-<span class="sourceLineNo">128</span>    assertEquals(encodedRegionName, region.getRegionInfo().getEncodedName());<a name="line.128"></a>
-<span class="sourceLineNo">129</span>    List&lt;String&gt; storeFiles = region.getStoreFileList(columnFamilyAsByteArray);<a name="line.129"></a>
-<span class="sourceLineNo">130</span>    // There should be no store files.<a name="line.130"></a>
-<span class="sourceLineNo">131</span>    assertTrue(storeFiles.isEmpty());<a name="line.131"></a>
-<span class="sourceLineNo">132</span>    region.close();<a name="line.132"></a>
-<span class="sourceLineNo">133</span>    Path regionDir = region.getRegionDir(hbaseRootDir, hri);<a name="line.133"></a>
-<span class="sourceLineNo">134</span>    Path recoveredEditsDir = WALSplitter.getRegionDirRecoveredEditsDir(regionDir);<a name="line.134"></a>
-<span class="sourceLineNo">135</span>    // This is a little fragile getting this path to a file of 10M of edits.<a name="line.135"></a>
-<span class="sourceLineNo">136</span>    Path recoveredEditsFile = new Path(<a name="line.136"></a>
-<span class="sourceLineNo">137</span>      System.getProperty("test.build.classes", "target/test-classes"),<a name="line.137"></a>
-<span class="sourceLineNo">138</span>        "0000000000000016310");<a name="line.138"></a>
-<span class="sourceLineNo">139</span>    // Copy this file under the region's recovered.edits dir so it is replayed on reopen.<a name="line.139"></a>
-<span class="sourceLineNo">140</span>    Path destination = new Path(recoveredEditsDir, recoveredEditsFile.getName());<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    fs.copyToLocalFile(recoveredEditsFile, destination);<a name="line.141"></a>
-<span class="sourceLineNo">142</span>    assertTrue(fs.exists(destination));<a name="line.142"></a>
-<span class="sourceLineNo">143</span>    // Now the file 0000000000000016310 is under recovered.edits, reopen the region to replay.<a name="line.143"></a>
-<span class="sourceLineNo">144</span>    region = HRegion.openHRegion(region, null);<a name="line.144"></a>
-<span class="sourceLineNo">145</span>    assertEquals(encodedRegionName, region.getRegionInfo().getEncodedName());<a name="line.145"></a>
-<span class="sourceLineNo">146</span>    storeFiles = region.getStoreFileList(columnFamilyAsByteArray);<a name="line.146"></a>
-<span class="sourceLineNo">147</span>    // Our 0000000000000016310 is 10MB. Most of the edits are for one region. Lets assume that if<a name="line.147"></a>
-<span class="sourceLineNo">148</span>    // we flush at 1MB, that there are at least 3 flushed files that are there because of the<a name="line.148"></a>
-<span class="sourceLineNo">149</span>    // replay of edits.<a name="line.149"></a>
-<span class="sourceLineNo">150</span>    if(policy == MemoryCompactionPolicy.EAGER || policy == MemoryCompactionPolicy.ADAPTIVE) {<a name="line.150"></a>
-<span class="sourceLineNo">151</span>      assertTrue("Files count=" + storeFiles.size(), storeFiles.size() &gt;= 1);<a name="line.151"></a>
-<span class="sourceLineNo">152</span>    } else {<a name="line.152"></a>
-<span class="sourceLineNo">153</span>      assertTrue("Files count=" + storeFiles.size(), storeFiles.size() &gt; 10);<a name="line.153"></a>
-<span class="sourceLineNo">154</span>    }<a name="line.154"></a>
-<span class="sourceLineNo">155</span>    // Now verify all edits made it into the region.<a name="line.155"></a>
-<span class="sourceLineNo">156</span>    int count = verifyAllEditsMadeItIn(fs, conf, recoveredEditsFile, region);<a name="line.156"></a>
-<span class="sourceLineNo">157</span>    LOG.info("Checked " + count + " edits made it in");<a name="line.157"></a>
-<span class="sourceLineNo">158</span>  }<a name="line.158"></a>
-<span class="sourceLineNo">159</span><a name="line.159"></a>
-<span class="sourceLineNo">160</span>  /**<a name="line.160"></a>
-<span class="sourceLineNo">161</span>   * @param fs<a name="line.161"></a>
-<span class="sourceLineNo">162</span>   * @param conf<a name="line.162"></a>
-<span class="sourceLineNo">163</span>   * @param edits<a name="line.163"></a>
-<span class="sourceLineNo">164</span>   * @param region<a name="line.164"></a>
-<span class="sourceLineNo">165</span>   * @return Return how many edits seen.<a name="line.165"></a>
-<span class="sourceLineNo">166</span>   * @throws IOException<a name="line.166"></a>
-<span class="sourceLineNo">167</span>   */<a name="line.167"></a>
-<span class="sourceLineNo">168</span>  private int verifyAllEditsMadeItIn(final FileSystem fs, final Configuration conf,<a name="line.168"></a>
-<span class="sourceLineNo">169</span>      final Path edits, final HRegion region)<a name="line.169"></a>
-<span class="sourceLineNo">170</span>  throws IOException {<a name="line.170"></a>
-<span class="sourceLineNo">171</span>    int count = 0;<a name="line.171"></a>
-<span class="sourceLineNo">172</span>    // Based on HRegion#replayRecoveredEdits<a name="line.172"></a>
-<span class="sourceLineNo">173</span>    WAL.Reader reader = null;<a name="line.173"></a>
-<span class="sourceLineNo">174</span>    try {<a name="line.174"></a>
-<span class="sourceLineNo">175</span>      reader = WALFactory.createReader(fs, edits, conf);<a name="line.175"></a>
-<span class="sourceLineNo">176</span>      WAL.Entry entry;<a name="line.176"></a>
-<span class="sourceLineNo">177</span>      while ((entry = reader.next()) != null) {<a name="line.177"></a>
-<span class="sourceLineNo">178</span>        WALKey key = entry.getKey();<a name="line.178"></a>
-<span class="sourceLineNo">179</span>        WALEdit val = entry.getEdit();<a name="line.179"></a>
-<span class="sourceLineNo">180</span>        count++;<a name="line.180"></a>
-<span class="sourceLineNo">181</span>        // Check this edit is for this region.<a name="line.181"></a>
-<span class="sourceLineNo">182</span>        if (!Bytes.equals(key.getEncodedRegionName(),<a name="line.182"></a>
-<span class="sourceLineNo">183</span>            region.getRegionInfo().getEncodedNameAsBytes())) {<a name="line.183"></a>
-<span class="sourceLineNo">184</span>          continue;<a name="line.184"></a>
-<span class="sourceLineNo">185</span>        }<a name="line.185"></a>
-<span class="sourceLineNo">186</span>        Cell previous = null;<a name="line.186"></a>
-<span class="sourceLineNo">187</span>        for (Cell cell: val.getCells()) {<a name="line.187"></a>
-<span class="sourceLineNo">188</span>          if (CellUtil.matchingFamily(cell, WALEdit.METAFAMILY)) continue;<a name="line.188"></a>
-<span class="sourceLineNo">189</span>          if (previous != null &amp;&amp; CellComparatorImpl.COMPARATOR.compareRows(previous, cell) == 0)<a name="line.189"></a>
-<span class="sourceLineNo">190</span>            continue;<a name="line.190"></a>
-<span class="sourceLineNo">191</span>          previous = cell;<a name="line.191"></a>
-<span class="sourceLineNo">192</span>          Get g = new Get(CellUtil.cloneRow(cell));<a name="line.192"></a>
-<span class="sourceLineNo">193</span>          Result r = region.get(g);<a name="line.193"></a>
-<span class="sourceLineNo">194</span>          boolean found = false;<a name="line.194"></a>
-<span class="sourceLineNo">195</span>          for (CellScanner scanner = r.cellScanner(); scanner.advance();) {<a name="line.195"></a>
-<span class="sourceLineNo">196</span>            Cell current = scanner.current();<a name="line.196"></a>
-<span class="sourceLineNo">197</span>            if (PrivateCellUtil.compareKeyIgnoresMvcc(CellComparatorImpl.COMPARATOR, cell,<a name="line.197"></a>
-<span class="sourceLineNo">198</span>              current) == 0) {<a name="line.198"></a>
-<span class="sourceLineNo">199</span>              found = true;<a name="line.199"></a>
-<span class="sourceLineNo">200</span>              break;<a name="line.200"></a>
-<span class="sourceLineNo">201</span>            }<a name="line.201"></a>
-<span class="sourceLineNo">202</span>          }<a name="line.202"></a>
-<span class="sourceLineNo">203</span>          assertTrue("Failed to find " + cell, found);<a name="line.203"></a>
-<span class="sourceLineNo">204</span>        }<a name="line.204"></a>
-<span class="sourceLineNo">205</span>      }<a name="line.205"></a>
-<span class="sourceLineNo">206</span>    } finally {<a name="line.206"></a>
-<span class="sourceLineNo">207</span>      if (reader != null) reader.close();<a name="line.207"></a>
-<span class="sourceLineNo">208</span>    }<a name="line.208"></a>
-<span class="sourceLineNo">209</span>    return count;<a name="line.209"></a>
-<span class="sourceLineNo">210</span>  }<a name="line.210"></a>
-<span class="sourceLineNo">211</span>}<a name="line.211"></a>
+<span class="sourceLineNo">043</span>import org.apache.hadoop.hbase.io.hfile.CacheConfig;<a name="line.43"></a>
+<span class="sourceLineNo">044</span>import org.apache.hadoop.hbase.testclassification.MediumTests;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>import org.apache.hadoop.hbase.util.Bytes;<a name="line.45"></a>
+<span class="sourceLineNo">046</span>import org.apache.hadoop.hbase.util.FSUtils;<a name="line.46"></a>
+<span class="sourceLineNo">047</span>import org.apache.hadoop.hbase.wal.WAL;<a name="line.47"></a>
+<span class="sourceLineNo">048</span>import org.apache.hadoop.hbase.wal.WALEdit;<a name="line.48"></a>
+<span class="sourceLineNo">049</span>import org.apache.hadoop.hbase.wal.WALFactory;<a name="line.49"></a>
+<span class="sourceLineNo">050</span>import org.apache.hadoop.hbase.wal.WALKey;<a name="line.50"></a>
+<span class="sourceLineNo">051</span>import org.apache.hadoop.hbase.wal.WALSplitter;<a name="line.51"></a>
+<span class="sourceLineNo">052</span>import org.junit.ClassRule;<a name="line.52"></a>
+<span class="sourceLineNo">053</span>import org.junit.Rule;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>import org.junit.Test;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>import org.junit.experimental.categories.Category;<a name="line.55"></a>
+<span class="sourceLineNo">056</span>import org.junit.rules.TestName;<a name="line.56"></a>
+<span class="sourceLineNo">057</span>import org.slf4j.Logger;<a name="line.57"></a>
+<span class="sourceLineNo">058</span>import org.slf4j.LoggerFactory;<a name="line.58"></a>
+<span class="sourceLineNo">059</span><a name="line.59"></a>
+<span class="sourceLineNo">060</span>/**<a name="line.60"></a>
+<span class="sourceLineNo">061</span> * Tests around replay of recovered.edits content.<a name="line.61"></a>
+<span class="sourceLineNo">062</span> */<a name="line.62"></a>
+<span class="sourceLineNo">063</span>@Category({MediumTests.class})<a name="line.63"></a>
+<span class="sourceLineNo">064</span>public class TestRecoveredEdits {<a name="line.64"></a>
+<span class="sourceLineNo">065</span><a name="line.65"></a>
+<span class="sourceLineNo">066</span>  @ClassRule<a name="line.66"></a>
+<span class="sourceLineNo">067</span>  public static final HBaseClassTestRule CLASS_RULE =<a name="line.67"></a>
+<span class="sourceLineNo">068</span>      HBaseClassTestRule.forClass(TestRecoveredEdits.class);<a name="line.68"></a>
+<span class="sourceLineNo">069</span><a name="line.69"></a>
+<span class="sourceLineNo">070</span>  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();<a name="line.70"></a>
+<span class="sourceLineNo">071</span>  private static final Logger LOG = LoggerFactory.getLogger(TestRecoveredEdits.class);<a name="line.71"></a>
+<span class="sourceLineNo">072</span>  @Rule public TestName testName = new TestName();<a name="line.72"></a>
+<span class="sourceLineNo">073</span><a name="line.73"></a>
+<span class="sourceLineNo">074</span>  /**<a name="line.74"></a>
+<span class="sourceLineNo">075</span>   * HBASE-12782 ITBLL fails for me if generator does anything but 5M per maptask.<a name="line.75"></a>
+<span class="sourceLineNo">076</span>   * Create a region. Close it. Then copy into place a file to replay, one that is bigger than<a name="line.76"></a>
+<span class="sourceLineNo">077</span>   * configured flush size so we bring on lots of flushes.  Then reopen and confirm all edits<a name="line.77"></a>
+<span class="sourceLineNo">078</span>   * made it in.<a name="line.78"></a>
+<span class="sourceLineNo">079</span>   * @throws IOException<a name="line.79"></a>
+<span class="sourceLineNo">080</span>   */<a name="line.80"></a>
+<span class="sourceLineNo">081</span>  @Test<a name="line.81"></a>
+<span class="sourceLineNo">082</span>  public void testReplayWorksThoughLotsOfFlushing() throws<a name="line.82"></a>
+<span class="sourceLineNo">083</span>      IOException {<a name="line.83"></a>
+<span class="sourceLineNo">084</span>    CacheConfig.instantiateBlockCache(TEST_UTIL.getConfiguration());<a name="line.84"></a>
+<span class="sourceLineNo">085</span>    for(MemoryCompactionPolicy policy : MemoryCompactionPolicy.values()) {<a name="line.85"></a>
+<span class="sourceLineNo">086</span>      testReplayWorksWithMemoryCompactionPolicy(policy);<a name="line.86"></a>
+<span class="sourceLineNo">087</span>    }<a name="line.87"></a>
+<span class="sourceLineNo">088</span>  }<a name="line.88"></a>
+<span class="sourceLineNo">089</span><a name="line.89"></a>
+<span class="sourceLineNo">090</span>  private void testReplayWorksWithMemoryCompactionPolicy(MemoryCompactionPolicy policy) throws<a name="line.90"></a>
+<span class="sourceLineNo">091</span>    IOException {<a name="line.91"></a>
+<span class="sourceLineNo">092</span>    Configuration conf = new Configuration(TEST_UTIL.getConfiguration());<a name="line.92"></a>
+<span class="sourceLineNo">093</span>    // Set it so we flush every 1M or so.  Thats a lot.<a name="line.93"></a>
+<span class="sourceLineNo">094</span>    conf.setInt(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, 1024*1024);<a name="line.94"></a>
+<span class="sourceLineNo">095</span>    conf.set(CompactingMemStore.COMPACTING_MEMSTORE_TYPE_KEY, String.valueOf(policy).toLowerCase());<a name="line.95"></a>
+<span class="sourceLineNo">096</span>    // The file of recovered edits has a column family of 'meta'. Also has an encoded regionname<a name="line.96"></a>
+<span class="sourceLineNo">097</span>    // of 4823016d8fca70b25503ee07f4c6d79f which needs to match on replay.<a name="line.97"></a>
+<span class="sourceLineNo">098</span>    final String encodedRegionName = "4823016d8fca70b25503ee07f4c6d79f";<a name="line.98"></a>
+<span class="sourceLineNo">099</span>    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(testName.getMethodName()));<a name="line.99"></a>
+<span class="sourceLineNo">100</span>    final String columnFamily = "meta";<a name="line.100"></a>
+<span class="sourceLineNo">101</span>    byte [][] columnFamilyAsByteArray = new byte [][] {Bytes.toBytes(columnFamily)};<a name="line.101"></a>
+<span class="sourceLineNo">102</span>    htd.addFamily(new HColumnDescriptor(columnFamily));<a name="line.102"></a>
+<span class="sourceLineNo">103</span>    HRegionInfo hri = new HRegionInfo(htd.getTableName()) {<a name="line.103"></a>
+<span class="sourceLineNo">104</span>      @Override<a name="line.104"></a>
+<span class="sourceLineNo">105</span>      public synchronized String getEncodedName() {<a name="line.105"></a>
+<span class="sourceLineNo">106</span>        return encodedRegionName;<a name="line.106"></a>
+<span class="sourceLineNo">107</span>      }<a name="line.107"></a>
+<span class="sourceLineNo">108</span><a name="line.108"></a>
+<span class="sourceLineNo">109</span>      // Cache the name because lots of lookups.<a name="line.109"></a>
+<span class="sourceLineNo">110</span>      private byte [] encodedRegionNameAsBytes = null;<a name="line.110"></a>
+<span class="sourceLineNo">111</span>      @Override<a name="line.111"></a>
+<span class="sourceLineNo">112</span>      public synchronized byte[] getEncodedNameAsBytes() {<a name="line.112"></a>
+<span class="sourceLineNo">113</span>        if (encodedRegionNameAsBytes == null) {<a name="line.113"></a>
+<span class="sourceLineNo">114</span>          this.encodedRegionNameAsBytes = Bytes.toBytes(getEncodedName());<a name="line.114"></a>
+<span class="sourceLineNo">115</span>        }<a name="line.115"></a>
+<span class="sourceLineNo">116</span>        return this.encodedRegionNameAsBytes;<a name="line.116"></a>
+<span class="sourceLineNo">117</span>      }<a name="line.117"></a>
+<span class="sourceLineNo">118</span>    };<a name="line.118"></a>
+<span class="sourceLineNo">119</span>    Path hbaseRootDir = TEST_UTIL.getDataTestDir();<a name="line.119"></a>
+<span class="sourceLineNo">120</span>    ChunkCreator.initialize(MemStoreLABImpl.CHUNK_SIZE_DEFAULT, false, 0, 0, 0, null);<a name="line.120"></a>
+<span class="sourceLineNo">121</span>    FileSystem fs = FileSystem.get(TEST_UTIL.getConfiguration());<a name="line.121"></a>
+<span class="sourceLineNo">122</span>    Path tableDir = FSUtils.getTableDir(hbaseRootDir, htd.getTableName());<a name="line.122"></a>
+<span class="sourceLineNo">123</span>    HRegionFileSystem hrfs =<a name="line.123"></a>
+<span class="sourceLineNo">124</span>        new HRegionFileSystem(TEST_UTIL.getConfiguration(), fs, tableDir, hri);<a name="line.124"></a>
+<span class="sourceLineNo">125</span>    if (fs.exists(hrfs.getRegionDir())) {<a name="line.125"></a>
+<span class="sourceLineNo">126</span>      LOG.info("Region directory already exists. Deleting.");<a name="line.126"></a>
+<span class="sourceLineNo">127</span>      fs.delete(hrfs.getRegionDir(), true);<a name="line.127"></a>
+<span class="sourceLineNo">128</span>    }<a name="line.128"></a>
+<span class="sourceLineNo">129</span>    HRegion region = HRegion.createHRegion(hri, hbaseRootDir, conf, htd, null);<a name="line.129"></a>
+<span class="sourceLineNo">130</span>    assertEquals(encodedRegionName, region.getRegionInfo().getEncodedName());<a name="line.130"></a>
+<span class="sourceLineNo">131</span>    List&lt;String&gt; storeFiles = region.getStoreFileList(columnFamilyAsByteArray);<a name="line.131"></a>
+<span class="sourceLineNo">132</span>    // There should be no store files.<a name="line.132"></a>
+<span class="sourceLineNo">133</span>    assertTrue(storeFiles.isEmpty());<a name="line.133"></a>
+<span class="sourceLineNo">134</span>    region.close();<a name="line.134"></a>
+<span class="sourceLineNo">135</span>    Path regionDir = region.getRegionDir(hbaseRootDir, hri);<a name="line.135"></a>
+<span class="sourceLineNo">136</span>    Path recoveredEditsDir = WALSplitter.getRegionDirRecoveredEditsDir(regionDir);<a name="line.136"></a>
+<span class="sourceLineNo">137</span>    // This is a little fragile getting this path to a file of 10M of edits.<a name="line.137"></a>
+<span class="sourceLineNo">138</span>    Path recoveredEditsFile = new Path(<a name="line.138"></a>
+<span class="sourceLineNo">139</span>      System.getProperty("test.build.classes", "target/test-classes"),<a name="line.139"></a>
+<span class="sourceLineNo">140</span>        "0000000000000016310");<a name="line.140"></a>
+<span class="sourceLineNo">141</span>    // Copy this file under the region's recovered.edits dir so it is replayed on reopen.<a name="line.141"></a>
+<span class="sourceLineNo">142</span>    Path destination = new Path(recoveredEditsDir, recoveredEditsFile.getName());<a name="line.142"></a>
+<span class="sourceLineNo">143</span>    fs.copyToLocalFile(recoveredEditsFile, destination);<a name="line.143"></a>
+<span class="sourceLineNo">144</span>    assertTrue(fs.exists(destination));<a name="line.144"></a>
+<span class="sourceLineNo">145</span>    // Now the file 0000000000000016310 is under recovered.edits, reopen the region to replay.<a name="line.145"></a>
+<span class="sourceLineNo">146</span>    region = HRegion.openHRegion(region, null);<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    assertEquals(encodedRegionName, region.getRegionInfo().getEncodedName());<a name="line.147"></a>
+<span class="sourceLineNo">148</span>    storeFiles = region.getStoreFileList(columnFamilyAsByteArray);<a name="line.148"></a>
+<span class="sourceLineNo">149</span>    // Our 0000000000000016310 is 10MB. Most of the edits are for one region. Lets assume that if<a name="line.149"></a>
+<span class="sourceLineNo">150</span>    // we flush at 1MB, that there are at least 3 flushed files that are there because of the<a name="line.150"></a>
+<span class="sourceLineNo">151</span>    // replay of edits.<a name="line.151"></a>
+<span class="sourceLineNo">152</span>    if(policy == MemoryCompactionPolicy.EAGER || policy == MemoryCompactionPolicy.ADAPTIVE) {<a name="line.152"></a>
+<span class="sourceLineNo">153</span>      assertTrue("Files count=" + storeFiles.size(), storeFiles.size() &gt;= 1);<a name="line.153"></a>
+<span class="sourceLineNo">154</span>    } else {<a name="line.154"></a>
+<span class="sourceLineNo">155</span>      assertTrue("Files count=" + storeFiles.size(), storeFiles.size() &gt; 10);<a name="line.155"></a>
+<span class="sourceLineNo">156</span>    }<a name="line.156"></a>
+<span class="sourceLineNo">157</span>    // Now verify all edits made it into the region.<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    int count = verifyAllEditsMadeItIn(fs, conf, recoveredEditsFile, region);<a name="line.158"></a>
+<span class="sourceLineNo">159</span>    LOG.info("Checked " + count + " edits made it in");<a name="line.159"></a>
+<span class="sourceLineNo">160</span>  }<a name="line.160"></a>
+<span class="sourceLineNo">161</span><a name="line.161"></a>
+<span class="sourceLineNo">162</span>  /**<a name="line.162"></a>
+<span class="sourceLineNo">163</span>   * @param fs<a name="line.163"></a>
+<span class="sourceLineNo">164</span>   * @param conf<a name="line.164"></a>
+<span class="sourceLineNo">165</span>   * @param edits<a name="line.165"></a>
+<span class="sourceLineNo">166</span>   * @param region<a name="line.166"></a>
+<span class="sourceLineNo">167</span>   * @return Return how many edits seen.<a name="line.167"></a>
+<span class="sourceLineNo">168</span>   * @throws IOException<a name="line.168"></a>
+<span class="sourceLineNo">169</span>   */<a name="line.169"></a>
+<span class="sourceLineNo">170</span>  private int verifyAllEditsMadeItIn(final FileSystem fs, final Configuration conf,<a name="line.170"></a>
+<span class="sourceLineNo">171</span>      final Path edits, final HRegion region)<a name="line.171"></a>
+<span class="sourceLineNo">172</span>  throws IOException {<a name="line.172"></a>
+<span class="sourceLineNo">173</span>    int count = 0;<a name="line.173"></a>
+<span class="sourceLineNo">174</span>    // Based on HRegion#replayRecoveredEdits<a name="line.174"></a>
+<span class="sourceLineNo">175</span>    WAL.Reader reader = null;<a name="line.175"></a>
+<span class="sourceLineNo">176</span>    try {<a name="line.176"></a>
+<span class="sourceLineNo">177</span>      reader = WALFactory.createReader(fs, edits, conf);<a name="line.177"></a>
+<span class="sourceLineNo">178</span>      WAL.Entry entry;<a name="line.178"></a>
+<span class="sourceLineNo">179</span>      while ((entry = reader.next()) != null) {<a name="line.179"></a>
+<span class="sourceLineNo">180</span>        WALKey key = entry.getKey();<a name="line.180"></a>
+<span class="sourceLineNo">181</span>        WALEdit val = entry.getEdit();<a name="line.181"></a>
+<span class="sourceLineNo">182</span>        count++;<a name="line.182"></a>
+<span class="sourceLineNo">183</span>        // Check this edit is for this region.<a name="line.183"></a>
+<span class="sourceLineNo">184</span>        if (!Bytes.equals(key.getEncodedRegionName(),<a name="line.184"></a>
+<span class="sourceLineNo">185</span>            region.getRegionInfo().getEncodedNameAsBytes())) {<a name="line.185"></a>
+<span class="sourceLineNo">186</span>          continue;<a name="line.186"></a>
+<span class="sourceLineNo">187</span>        }<a name="line.187"></a>
+<span class="sourceLineNo">188</span>        Cell previous = null;<a name="line.188"></a>
+<span class="sourceLineNo">189</span>        for (Cell cell: val.getCells()) {<a name="line.189"></a>
+<span class="sourceLineNo">190</span>          if (CellUtil.matchingFamily(cell, WALEdit.METAFAMILY)) continue;<a name="line.190"></a>
+<span class="sourceLineNo">191</span>          if (previous != null &amp;&amp; CellComparatorImpl.COMPARATOR.compareRows(previous, cell) == 0)<a name="line.191"></a>
+<span class="sourceLineNo">192</span>            continue;<a name="line.192"></a>
+<span class="sourceLineNo">193</span>          previous = cell;<a name="line.193"></a>
+<span class="sourceLineNo">194</span>          Get g = new Get(CellUtil.cloneRow(cell));<a name="line.194"></a>
+<span class="sourceLineNo">195</span>          Result r = region.get(g);<a name="line.195"></a>
+<span class="sourceLineNo">196</span>          boolean found = false;<a name="line.196"></a>
+<span class="sourceLineNo">197</span>          for (CellScanner scanner = r.cellScanner(); scanner.advance();) {<a name="line.197"></a>
+<span class="sourceLineNo">198</span>            Cell current = scanner.current();<a name="line.198"></a>
+<span class="sourceLineNo">199</span>            if (PrivateCellUtil.compareKeyIgnoresMvcc(CellComparatorImpl.COMPARATOR, cell,<a name="line.199"></a>
+<span class="sourceLineNo">200</span>              current) == 0) {<a name="line.200"></a>
+<span class="sourceLineNo">201</span>              found = true;<a name="line.201"></a>
+<span class="sourceLineNo">202</span>              break;<a name="line.202"></a>
+<span class="sourceLineNo">203</span>            }<a name="line.203"></a>
+<span class="sourceLineNo">204</span>          }<a name="line.204"></a>
+<span class="sourceLineNo">205</span>          assertTrue("Failed to find " + cell, found);<a name="line.205"></a>
+<span class="sourceLineNo">206</span>        }<a name="line.206"></a>
+<span class="sourceLineNo">207</span>      }<a name="line.207"></a>
+<span class="sourceLineNo">208</span>    } finally {<a name="line.208"></a>
+<span class="sourceLineNo">209</span>      if (reader != null) reader.close();<a name="line.209"></a>
+<span class="sourceLineNo">210</span>    }<a name="line.210"></a>
+<span class="sourceLineNo">211</span>    return count;<a name="line.211"></a>
+<span class="sourceLineNo">212</span>  }<a name="line.212"></a>
+<span class="sourceLineNo">213</span>}<a name="line.213"></a>
 
 
 


[05/26] hbase-site git commit: Published site at 6f15cecaed2f1f76bfe1880b7c578ed369daa5d5.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.html
index 76a9ecc..920f7ff 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.html
@@ -532,271 +532,264 @@
 <span class="sourceLineNo">524</span>  * @throws IOException<a name="line.524"></a>
 <span class="sourceLineNo">525</span>  */<a name="line.525"></a>
 <span class="sourceLineNo">526</span>  @Test<a name="line.526"></a>
-<span class="sourceLineNo">527</span> public void testMidKeyOnLeafIndexBlockBoundary() throws IOException {<a name="line.527"></a>
-<span class="sourceLineNo">528</span>   Path hfilePath = new Path(TEST_UTIL.getDataTestDir(),<a name="line.528"></a>
-<span class="sourceLineNo">529</span>       "hfile_for_midkey");<a name="line.529"></a>
-<span class="sourceLineNo">530</span>   int maxChunkSize = 512;<a name="line.530"></a>
-<span class="sourceLineNo">531</span>   conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, maxChunkSize);<a name="line.531"></a>
-<span class="sourceLineNo">532</span>   // should open hfile.block.index.cacheonwrite<a name="line.532"></a>
-<span class="sourceLineNo">533</span>   conf.setBoolean(CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY, true);<a name="line.533"></a>
-<span class="sourceLineNo">534</span><a name="line.534"></a>
-<span class="sourceLineNo">535</span>   CacheConfig cacheConf = new CacheConfig(conf);<a name="line.535"></a>
-<span class="sourceLineNo">536</span>   BlockCache blockCache = cacheConf.getBlockCache();<a name="line.536"></a>
-<span class="sourceLineNo">537</span>   // Evict all blocks that were cached-on-write by the previous invocation.<a name="line.537"></a>
-<span class="sourceLineNo">538</span>   blockCache.evictBlocksByHfileName(hfilePath.getName());<a name="line.538"></a>
-<span class="sourceLineNo">539</span>   // Write the HFile<a name="line.539"></a>
-<span class="sourceLineNo">540</span>   {<a name="line.540"></a>
-<span class="sourceLineNo">541</span>     HFileContext meta = new HFileContextBuilder()<a name="line.541"></a>
-<span class="sourceLineNo">542</span>                         .withBlockSize(SMALL_BLOCK_SIZE)<a name="line.542"></a>
-<span class="sourceLineNo">543</span>                         .withCompression(Algorithm.NONE)<a name="line.543"></a>
-<span class="sourceLineNo">544</span>                         .withDataBlockEncoding(DataBlockEncoding.NONE)<a name="line.544"></a>
-<span class="sourceLineNo">545</span>                         .build();<a name="line.545"></a>
-<span class="sourceLineNo">546</span>     HFile.Writer writer =<a name="line.546"></a>
-<span class="sourceLineNo">547</span>           HFile.getWriterFactory(conf, cacheConf)<a name="line.547"></a>
-<span class="sourceLineNo">548</span>               .withPath(fs, hfilePath)<a name="line.548"></a>
-<span class="sourceLineNo">549</span>               .withFileContext(meta)<a name="line.549"></a>
-<span class="sourceLineNo">550</span>               .create();<a name="line.550"></a>
-<span class="sourceLineNo">551</span>     Random rand = new Random(19231737);<a name="line.551"></a>
-<span class="sourceLineNo">552</span>     byte[] family = Bytes.toBytes("f");<a name="line.552"></a>
-<span class="sourceLineNo">553</span>     byte[] qualifier = Bytes.toBytes("q");<a name="line.553"></a>
-<span class="sourceLineNo">554</span>     int kvNumberToBeWritten = 16;<a name="line.554"></a>
-<span class="sourceLineNo">555</span>     // the new generated hfile will contain 2 leaf-index blocks and 16 data blocks,<a name="line.555"></a>
-<span class="sourceLineNo">556</span>     // midkey is just on the boundary of the first leaf-index block<a name="line.556"></a>
-<span class="sourceLineNo">557</span>     for (int i = 0; i &lt; kvNumberToBeWritten; ++i) {<a name="line.557"></a>
-<span class="sourceLineNo">558</span>       byte[] row = RandomKeyValueUtil.randomOrderedFixedLengthKey(rand, i, 30);<a name="line.558"></a>
-<span class="sourceLineNo">559</span><a name="line.559"></a>
-<span class="sourceLineNo">560</span>       // Key will be interpreted by KeyValue.KEY_COMPARATOR<a name="line.560"></a>
-<span class="sourceLineNo">561</span>       KeyValue kv =<a name="line.561"></a>
-<span class="sourceLineNo">562</span>             new KeyValue(row, family, qualifier, EnvironmentEdgeManager.currentTime(),<a name="line.562"></a>
-<span class="sourceLineNo">563</span>                 RandomKeyValueUtil.randomFixedLengthValue(rand, SMALL_BLOCK_SIZE));<a name="line.563"></a>
-<span class="sourceLineNo">564</span>       writer.append(kv);<a name="line.564"></a>
-<span class="sourceLineNo">565</span>     }<a name="line.565"></a>
-<span class="sourceLineNo">566</span>     writer.close();<a name="line.566"></a>
-<span class="sourceLineNo">567</span>   }<a name="line.567"></a>
-<span class="sourceLineNo">568</span><a name="line.568"></a>
-<span class="sourceLineNo">569</span>   // close hfile.block.index.cacheonwrite<a name="line.569"></a>
-<span class="sourceLineNo">570</span>   conf.setBoolean(CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY, false);<a name="line.570"></a>
-<span class="sourceLineNo">571</span><a name="line.571"></a>
-<span class="sourceLineNo">572</span>   // Read the HFile<a name="line.572"></a>
-<span class="sourceLineNo">573</span>   HFile.Reader reader = HFile.createReader(fs, hfilePath, cacheConf, true, conf);<a name="line.573"></a>
-<span class="sourceLineNo">574</span><a name="line.574"></a>
-<span class="sourceLineNo">575</span>   boolean hasArrayIndexOutOfBoundsException = false;<a name="line.575"></a>
-<span class="sourceLineNo">576</span>   try {<a name="line.576"></a>
-<span class="sourceLineNo">577</span>     // get the mid-key.<a name="line.577"></a>
-<span class="sourceLineNo">578</span>     reader.midKey();<a name="line.578"></a>
-<span class="sourceLineNo">579</span>   } catch (ArrayIndexOutOfBoundsException e) {<a name="line.579"></a>
-<span class="sourceLineNo">580</span>     hasArrayIndexOutOfBoundsException = true;<a name="line.580"></a>
-<span class="sourceLineNo">581</span>   } finally {<a name="line.581"></a>
-<span class="sourceLineNo">582</span>     reader.close();<a name="line.582"></a>
-<span class="sourceLineNo">583</span>   }<a name="line.583"></a>
-<span class="sourceLineNo">584</span><a name="line.584"></a>
-<span class="sourceLineNo">585</span>   // to check if ArrayIndexOutOfBoundsException occurred<a name="line.585"></a>
-<span class="sourceLineNo">586</span>   assertFalse(hasArrayIndexOutOfBoundsException);<a name="line.586"></a>
-<span class="sourceLineNo">587</span> }<a name="line.587"></a>
-<span class="sourceLineNo">588</span><a name="line.588"></a>
-<span class="sourceLineNo">589</span>  /**<a name="line.589"></a>
-<span class="sourceLineNo">590</span>   * Testing block index through the HFile writer/reader APIs. Allows to test<a name="line.590"></a>
-<span class="sourceLineNo">591</span>   * setting index block size through configuration, intermediate-level index<a name="line.591"></a>
-<span class="sourceLineNo">592</span>   * blocks, and caching index blocks on write.<a name="line.592"></a>
-<span class="sourceLineNo">593</span>   *<a name="line.593"></a>
-<span class="sourceLineNo">594</span>   * @throws IOException<a name="line.594"></a>
-<span class="sourceLineNo">595</span>   */<a name="line.595"></a>
-<span class="sourceLineNo">596</span>  @Test<a name="line.596"></a>
-<span class="sourceLineNo">597</span>  public void testHFileWriterAndReader() throws IOException {<a name="line.597"></a>
-<span class="sourceLineNo">598</span>    Path hfilePath = new Path(TEST_UTIL.getDataTestDir(),<a name="line.598"></a>
-<span class="sourceLineNo">599</span>        "hfile_for_block_index");<a name="line.599"></a>
-<span class="sourceLineNo">600</span>    CacheConfig cacheConf = new CacheConfig(conf);<a name="line.600"></a>
-<span class="sourceLineNo">601</span>    BlockCache blockCache = cacheConf.getBlockCache();<a name="line.601"></a>
-<span class="sourceLineNo">602</span><a name="line.602"></a>
-<span class="sourceLineNo">603</span>    for (int testI = 0; testI &lt; INDEX_CHUNK_SIZES.length; ++testI) {<a name="line.603"></a>
-<span class="sourceLineNo">604</span>      int indexBlockSize = INDEX_CHUNK_SIZES[testI];<a name="line.604"></a>
-<span class="sourceLineNo">605</span>      int expectedNumLevels = EXPECTED_NUM_LEVELS[testI];<a name="line.605"></a>
-<span class="sourceLineNo">606</span>      LOG.info("Index block size: " + indexBlockSize + ", compression: "<a name="line.606"></a>
-<span class="sourceLineNo">607</span>          + compr);<a name="line.607"></a>
-<span class="sourceLineNo">608</span>      // Evict all blocks that were cached-on-write by the previous invocation.<a name="line.608"></a>
-<span class="sourceLineNo">609</span>      blockCache.evictBlocksByHfileName(hfilePath.getName());<a name="line.609"></a>
-<span class="sourceLineNo">610</span><a name="line.610"></a>
-<span class="sourceLineNo">611</span>      conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, indexBlockSize);<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      Set&lt;String&gt; keyStrSet = new HashSet&lt;&gt;();<a name="line.612"></a>
-<span class="sourceLineNo">613</span>      byte[][] keys = new byte[NUM_KV][];<a name="line.613"></a>
-<span class="sourceLineNo">614</span>      byte[][] values = new byte[NUM_KV][];<a name="line.614"></a>
-<span class="sourceLineNo">615</span><a name="line.615"></a>
-<span class="sourceLineNo">616</span>      // Write the HFile<a name="line.616"></a>
-<span class="sourceLineNo">617</span>      {<a name="line.617"></a>
-<span class="sourceLineNo">618</span>        HFileContext meta = new HFileContextBuilder()<a name="line.618"></a>
-<span class="sourceLineNo">619</span>                            .withBlockSize(SMALL_BLOCK_SIZE)<a name="line.619"></a>
-<span class="sourceLineNo">620</span>                            .withCompression(compr)<a name="line.620"></a>
-<span class="sourceLineNo">621</span>                            .build();<a name="line.621"></a>
-<span class="sourceLineNo">622</span>        HFile.Writer writer =<a name="line.622"></a>
-<span class="sourceLineNo">623</span>            HFile.getWriterFactory(conf, cacheConf)<a name="line.623"></a>
-<span class="sourceLineNo">624</span>                .withPath(fs, hfilePath)<a name="line.624"></a>
-<span class="sourceLineNo">625</span>                .withFileContext(meta)<a name="line.625"></a>
-<span class="sourceLineNo">626</span>                .create();<a name="line.626"></a>
-<span class="sourceLineNo">627</span>        Random rand = new Random(19231737);<a name="line.627"></a>
-<span class="sourceLineNo">628</span>        byte[] family = Bytes.toBytes("f");<a name="line.628"></a>
-<span class="sourceLineNo">629</span>        byte[] qualifier = Bytes.toBytes("q");<a name="line.629"></a>
-<span class="sourceLineNo">630</span>        for (int i = 0; i &lt; NUM_KV; ++i) {<a name="line.630"></a>
-<span class="sourceLineNo">631</span>          byte[] row = RandomKeyValueUtil.randomOrderedKey(rand, i);<a name="line.631"></a>
-<span class="sourceLineNo">632</span><a name="line.632"></a>
-<span class="sourceLineNo">633</span>          // Key will be interpreted by KeyValue.KEY_COMPARATOR<a name="line.633"></a>
-<span class="sourceLineNo">634</span>          KeyValue kv =<a name="line.634"></a>
-<span class="sourceLineNo">635</span>              new KeyValue(row, family, qualifier, EnvironmentEdgeManager.currentTime(),<a name="line.635"></a>
-<span class="sourceLineNo">636</span>                  RandomKeyValueUtil.randomValue(rand));<a name="line.636"></a>
-<span class="sourceLineNo">637</span>          byte[] k = kv.getKey();<a name="line.637"></a>
-<span class="sourceLineNo">638</span>          writer.append(kv);<a name="line.638"></a>
-<span class="sourceLineNo">639</span>          keys[i] = k;<a name="line.639"></a>
-<span class="sourceLineNo">640</span>          values[i] = CellUtil.cloneValue(kv);<a name="line.640"></a>
-<span class="sourceLineNo">641</span>          keyStrSet.add(Bytes.toStringBinary(k));<a name="line.641"></a>
-<span class="sourceLineNo">642</span>          if (i &gt; 0) {<a name="line.642"></a>
-<span class="sourceLineNo">643</span>            assertTrue((PrivateCellUtil.compare(CellComparatorImpl.COMPARATOR, kv, keys[i - 1],<a name="line.643"></a>
-<span class="sourceLineNo">644</span>                0, keys[i - 1].length)) &gt; 0);<a name="line.644"></a>
-<span class="sourceLineNo">645</span>          }<a name="line.645"></a>
-<span class="sourceLineNo">646</span>        }<a name="line.646"></a>
-<span class="sourceLineNo">647</span><a name="line.647"></a>
-<span class="sourceLineNo">648</span>        writer.close();<a name="line.648"></a>
-<span class="sourceLineNo">649</span>      }<a name="line.649"></a>
-<span class="sourceLineNo">650</span><a name="line.650"></a>
-<span class="sourceLineNo">651</span>      // Read the HFile<a name="line.651"></a>
-<span class="sourceLineNo">652</span>      HFile.Reader reader = HFile.createReader(fs, hfilePath, cacheConf, true, conf);<a name="line.652"></a>
-<span class="sourceLineNo">653</span>      assertEquals(expectedNumLevels,<a name="line.653"></a>
-<span class="sourceLineNo">654</span>          reader.getTrailer().getNumDataIndexLevels());<a name="line.654"></a>
-<span class="sourceLineNo">655</span><a name="line.655"></a>
-<span class="sourceLineNo">656</span>      assertTrue(Bytes.equals(keys[0], ((KeyValue)reader.getFirstKey().get()).getKey()));<a name="line.656"></a>
-<span class="sourceLineNo">657</span>      assertTrue(Bytes.equals(keys[NUM_KV - 1], ((KeyValue)reader.getLastKey().get()).getKey()));<a name="line.657"></a>
-<span class="sourceLineNo">658</span>      LOG.info("Last key: " + Bytes.toStringBinary(keys[NUM_KV - 1]));<a name="line.658"></a>
-<span class="sourceLineNo">659</span><a name="line.659"></a>
-<span class="sourceLineNo">660</span>      for (boolean pread : new boolean[] { false, true }) {<a name="line.660"></a>
-<span class="sourceLineNo">661</span>        HFileScanner scanner = reader.getScanner(true, pread);<a name="line.661"></a>
-<span class="sourceLineNo">662</span>        for (int i = 0; i &lt; NUM_KV; ++i) {<a name="line.662"></a>
-<span class="sourceLineNo">663</span>          checkSeekTo(keys, scanner, i);<a name="line.663"></a>
-<span class="sourceLineNo">664</span>          checkKeyValue("i=" + i, keys[i], values[i],<a name="line.664"></a>
-<span class="sourceLineNo">665</span>              ByteBuffer.wrap(((KeyValue) scanner.getKey()).getKey()), scanner.getValue());<a name="line.665"></a>
-<span class="sourceLineNo">666</span>        }<a name="line.666"></a>
-<span class="sourceLineNo">667</span>        assertTrue(scanner.seekTo());<a name="line.667"></a>
-<span class="sourceLineNo">668</span>        for (int i = NUM_KV - 1; i &gt;= 0; --i) {<a name="line.668"></a>
-<span class="sourceLineNo">669</span>          checkSeekTo(keys, scanner, i);<a name="line.669"></a>
-<span class="sourceLineNo">670</span>          checkKeyValue("i=" + i, keys[i], values[i],<a name="line.670"></a>
-<span class="sourceLineNo">671</span>              ByteBuffer.wrap(((KeyValue) scanner.getKey()).getKey()), scanner.getValue());<a name="line.671"></a>
-<span class="sourceLineNo">672</span>        }<a name="line.672"></a>
-<span class="sourceLineNo">673</span>      }<a name="line.673"></a>
-<span class="sourceLineNo">674</span><a name="line.674"></a>
-<span class="sourceLineNo">675</span>      // Manually compute the mid-key and validate it.<a name="line.675"></a>
-<span class="sourceLineNo">676</span>      HFile.Reader reader2 = reader;<a name="line.676"></a>
-<span class="sourceLineNo">677</span>      HFileBlock.FSReader fsReader = reader2.getUncachedBlockReader();<a name="line.677"></a>
-<span class="sourceLineNo">678</span><a name="line.678"></a>
-<span class="sourceLineNo">679</span>      HFileBlock.BlockIterator iter = fsReader.blockRange(0,<a name="line.679"></a>
-<span class="sourceLineNo">680</span>          reader.getTrailer().getLoadOnOpenDataOffset());<a name="line.680"></a>
-<span class="sourceLineNo">681</span>      HFileBlock block;<a name="line.681"></a>
-<span class="sourceLineNo">682</span>      List&lt;byte[]&gt; blockKeys = new ArrayList&lt;&gt;();<a name="line.682"></a>
-<span class="sourceLineNo">683</span>      while ((block = iter.nextBlock()) != null) {<a name="line.683"></a>
-<span class="sourceLineNo">684</span>        if (block.getBlockType() != BlockType.LEAF_INDEX)<a name="line.684"></a>
-<span class="sourceLineNo">685</span>          return;<a name="line.685"></a>
-<span class="sourceLineNo">686</span>        ByteBuff b = block.getBufferReadOnly();<a name="line.686"></a>
-<span class="sourceLineNo">687</span>        int n = b.getIntAfterPosition(0);<a name="line.687"></a>
-<span class="sourceLineNo">688</span>        // One int for the number of items, and n + 1 for the secondary index.<a name="line.688"></a>
-<span class="sourceLineNo">689</span>        int entriesOffset = Bytes.SIZEOF_INT * (n + 2);<a name="line.689"></a>
-<span class="sourceLineNo">690</span><a name="line.690"></a>
-<span class="sourceLineNo">691</span>        // Get all the keys from the leaf index block. S<a name="line.691"></a>
-<span class="sourceLineNo">692</span>        for (int i = 0; i &lt; n; ++i) {<a name="line.692"></a>
-<span class="sourceLineNo">693</span>          int keyRelOffset = b.getIntAfterPosition(Bytes.SIZEOF_INT * (i + 1));<a name="line.693"></a>
-<span class="sourceLineNo">694</span>          int nextKeyRelOffset = b.getIntAfterPosition(Bytes.SIZEOF_INT * (i + 2));<a name="line.694"></a>
-<span class="sourceLineNo">695</span>          int keyLen = nextKeyRelOffset - keyRelOffset;<a name="line.695"></a>
-<span class="sourceLineNo">696</span>          int keyOffset = b.arrayOffset() + entriesOffset + keyRelOffset +<a name="line.696"></a>
-<span class="sourceLineNo">697</span>              HFileBlockIndex.SECONDARY_INDEX_ENTRY_OVERHEAD;<a name="line.697"></a>
-<span class="sourceLineNo">698</span>          byte[] blockKey = Arrays.copyOfRange(b.array(), keyOffset, keyOffset<a name="line.698"></a>
-<span class="sourceLineNo">699</span>              + keyLen);<a name="line.699"></a>
-<span class="sourceLineNo">700</span>          String blockKeyStr = Bytes.toString(blockKey);<a name="line.700"></a>
-<span class="sourceLineNo">701</span>          blockKeys.add(blockKey);<a name="line.701"></a>
+<span class="sourceLineNo">527</span>  public void testMidKeyOnLeafIndexBlockBoundary() throws IOException {<a name="line.527"></a>
+<span class="sourceLineNo">528</span>    Path hfilePath = new Path(TEST_UTIL.getDataTestDir(), "hfile_for_midkey");<a name="line.528"></a>
+<span class="sourceLineNo">529</span>    int maxChunkSize = 512;<a name="line.529"></a>
+<span class="sourceLineNo">530</span>    conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, maxChunkSize);<a name="line.530"></a>
+<span class="sourceLineNo">531</span>    // should open hfile.block.index.cacheonwrite<a name="line.531"></a>
+<span class="sourceLineNo">532</span>    conf.setBoolean(CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY, true);<a name="line.532"></a>
+<span class="sourceLineNo">533</span>    CacheConfig.instantiateBlockCache(conf);<a name="line.533"></a>
+<span class="sourceLineNo">534</span>    CacheConfig cacheConf = new CacheConfig(conf);<a name="line.534"></a>
+<span class="sourceLineNo">535</span>    BlockCache blockCache = cacheConf.getBlockCache();<a name="line.535"></a>
+<span class="sourceLineNo">536</span>    // Evict all blocks that were cached-on-write by the previous invocation.<a name="line.536"></a>
+<span class="sourceLineNo">537</span>    blockCache.evictBlocksByHfileName(hfilePath.getName());<a name="line.537"></a>
+<span class="sourceLineNo">538</span>    // Write the HFile<a name="line.538"></a>
+<span class="sourceLineNo">539</span>    HFileContext meta =<a name="line.539"></a>
+<span class="sourceLineNo">540</span>        new HFileContextBuilder().withBlockSize(SMALL_BLOCK_SIZE).withCompression(Algorithm.NONE)<a name="line.540"></a>
+<span class="sourceLineNo">541</span>            .withDataBlockEncoding(DataBlockEncoding.NONE).build();<a name="line.541"></a>
+<span class="sourceLineNo">542</span>    HFile.Writer writer =<a name="line.542"></a>
+<span class="sourceLineNo">543</span>        HFile.getWriterFactory(conf, cacheConf).withPath(fs, hfilePath).withFileContext(meta)<a name="line.543"></a>
+<span class="sourceLineNo">544</span>            .create();<a name="line.544"></a>
+<span class="sourceLineNo">545</span>    Random rand = new Random(19231737);<a name="line.545"></a>
+<span class="sourceLineNo">546</span>    byte[] family = Bytes.toBytes("f");<a name="line.546"></a>
+<span class="sourceLineNo">547</span>    byte[] qualifier = Bytes.toBytes("q");<a name="line.547"></a>
+<span class="sourceLineNo">548</span>    int kvNumberToBeWritten = 16;<a name="line.548"></a>
+<span class="sourceLineNo">549</span>    // the new generated hfile will contain 2 leaf-index blocks and 16 data blocks,<a name="line.549"></a>
+<span class="sourceLineNo">550</span>    // midkey is just on the boundary of the first leaf-index block<a name="line.550"></a>
+<span class="sourceLineNo">551</span>    for (int i = 0; i &lt; kvNumberToBeWritten; ++i) {<a name="line.551"></a>
+<span class="sourceLineNo">552</span>      byte[] row = RandomKeyValueUtil.randomOrderedFixedLengthKey(rand, i, 30);<a name="line.552"></a>
+<span class="sourceLineNo">553</span><a name="line.553"></a>
+<span class="sourceLineNo">554</span>      // Key will be interpreted by KeyValue.KEY_COMPARATOR<a name="line.554"></a>
+<span class="sourceLineNo">555</span>      KeyValue kv = new KeyValue(row, family, qualifier, EnvironmentEdgeManager.currentTime(),<a name="line.555"></a>
+<span class="sourceLineNo">556</span>          RandomKeyValueUtil.randomFixedLengthValue(rand, SMALL_BLOCK_SIZE));<a name="line.556"></a>
+<span class="sourceLineNo">557</span>      writer.append(kv);<a name="line.557"></a>
+<span class="sourceLineNo">558</span>    }<a name="line.558"></a>
+<span class="sourceLineNo">559</span>    writer.close();<a name="line.559"></a>
+<span class="sourceLineNo">560</span><a name="line.560"></a>
+<span class="sourceLineNo">561</span>    // close hfile.block.index.cacheonwrite<a name="line.561"></a>
+<span class="sourceLineNo">562</span>    conf.setBoolean(CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY, false);<a name="line.562"></a>
+<span class="sourceLineNo">563</span><a name="line.563"></a>
+<span class="sourceLineNo">564</span>    // Read the HFile<a name="line.564"></a>
+<span class="sourceLineNo">565</span>    HFile.Reader reader = HFile.createReader(fs, hfilePath, cacheConf, true, conf);<a name="line.565"></a>
+<span class="sourceLineNo">566</span><a name="line.566"></a>
+<span class="sourceLineNo">567</span>    boolean hasArrayIndexOutOfBoundsException = false;<a name="line.567"></a>
+<span class="sourceLineNo">568</span>    try {<a name="line.568"></a>
+<span class="sourceLineNo">569</span>      // get the mid-key.<a name="line.569"></a>
+<span class="sourceLineNo">570</span>      reader.midKey();<a name="line.570"></a>
+<span class="sourceLineNo">571</span>    } catch (ArrayIndexOutOfBoundsException e) {<a name="line.571"></a>
+<span class="sourceLineNo">572</span>      hasArrayIndexOutOfBoundsException = true;<a name="line.572"></a>
+<span class="sourceLineNo">573</span>    } finally {<a name="line.573"></a>
+<span class="sourceLineNo">574</span>      reader.close();<a name="line.574"></a>
+<span class="sourceLineNo">575</span>    }<a name="line.575"></a>
+<span class="sourceLineNo">576</span><a name="line.576"></a>
+<span class="sourceLineNo">577</span>    // to check if ArrayIndexOutOfBoundsException occurred<a name="line.577"></a>
+<span class="sourceLineNo">578</span>    assertFalse(hasArrayIndexOutOfBoundsException);<a name="line.578"></a>
+<span class="sourceLineNo">579</span>  }<a name="line.579"></a>
+<span class="sourceLineNo">580</span><a name="line.580"></a>
+<span class="sourceLineNo">581</span>  /**<a name="line.581"></a>
+<span class="sourceLineNo">582</span>   * Testing block index through the HFile writer/reader APIs. Allows to test<a name="line.582"></a>
+<span class="sourceLineNo">583</span>   * setting index block size through configuration, intermediate-level index<a name="line.583"></a>
+<span class="sourceLineNo">584</span>   * blocks, and caching index blocks on write.<a name="line.584"></a>
+<span class="sourceLineNo">585</span>   *<a name="line.585"></a>
+<span class="sourceLineNo">586</span>   * @throws IOException<a name="line.586"></a>
+<span class="sourceLineNo">587</span>   */<a name="line.587"></a>
+<span class="sourceLineNo">588</span>  @Test<a name="line.588"></a>
+<span class="sourceLineNo">589</span>  public void testHFileWriterAndReader() throws IOException {<a name="line.589"></a>
+<span class="sourceLineNo">590</span>    Path hfilePath = new Path(TEST_UTIL.getDataTestDir(),<a name="line.590"></a>
+<span class="sourceLineNo">591</span>        "hfile_for_block_index");<a name="line.591"></a>
+<span class="sourceLineNo">592</span>    CacheConfig.instantiateBlockCache(conf);<a name="line.592"></a>
+<span class="sourceLineNo">593</span>    CacheConfig cacheConf = new CacheConfig(conf);<a name="line.593"></a>
+<span class="sourceLineNo">594</span>    BlockCache blockCache = cacheConf.getBlockCache();<a name="line.594"></a>
+<span class="sourceLineNo">595</span><a name="line.595"></a>
+<span class="sourceLineNo">596</span>    for (int testI = 0; testI &lt; INDEX_CHUNK_SIZES.length; ++testI) {<a name="line.596"></a>
+<span class="sourceLineNo">597</span>      int indexBlockSize = INDEX_CHUNK_SIZES[testI];<a name="line.597"></a>
+<span class="sourceLineNo">598</span>      int expectedNumLevels = EXPECTED_NUM_LEVELS[testI];<a name="line.598"></a>
+<span class="sourceLineNo">599</span>      LOG.info("Index block size: " + indexBlockSize + ", compression: "<a name="line.599"></a>
+<span class="sourceLineNo">600</span>          + compr);<a name="line.600"></a>
+<span class="sourceLineNo">601</span>      // Evict all blocks that were cached-on-write by the previous invocation.<a name="line.601"></a>
+<span class="sourceLineNo">602</span>      blockCache.evictBlocksByHfileName(hfilePath.getName());<a name="line.602"></a>
+<span class="sourceLineNo">603</span><a name="line.603"></a>
+<span class="sourceLineNo">604</span>      conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, indexBlockSize);<a name="line.604"></a>
+<span class="sourceLineNo">605</span>      Set&lt;String&gt; keyStrSet = new HashSet&lt;&gt;();<a name="line.605"></a>
+<span class="sourceLineNo">606</span>      byte[][] keys = new byte[NUM_KV][];<a name="line.606"></a>
+<span class="sourceLineNo">607</span>      byte[][] values = new byte[NUM_KV][];<a name="line.607"></a>
+<span class="sourceLineNo">608</span><a name="line.608"></a>
+<span class="sourceLineNo">609</span>      // Write the HFile<a name="line.609"></a>
+<span class="sourceLineNo">610</span>      {<a name="line.610"></a>
+<span class="sourceLineNo">611</span>        HFileContext meta = new HFileContextBuilder()<a name="line.611"></a>
+<span class="sourceLineNo">612</span>                            .withBlockSize(SMALL_BLOCK_SIZE)<a name="line.612"></a>
+<span class="sourceLineNo">613</span>                            .withCompression(compr)<a name="line.613"></a>
+<span class="sourceLineNo">614</span>                            .build();<a name="line.614"></a>
+<span class="sourceLineNo">615</span>        HFile.Writer writer =<a name="line.615"></a>
+<span class="sourceLineNo">616</span>            HFile.getWriterFactory(conf, cacheConf)<a name="line.616"></a>
+<span class="sourceLineNo">617</span>                .withPath(fs, hfilePath)<a name="line.617"></a>
+<span class="sourceLineNo">618</span>                .withFileContext(meta)<a name="line.618"></a>
+<span class="sourceLineNo">619</span>                .create();<a name="line.619"></a>
+<span class="sourceLineNo">620</span>        Random rand = new Random(19231737);<a name="line.620"></a>
+<span class="sourceLineNo">621</span>        byte[] family = Bytes.toBytes("f");<a name="line.621"></a>
+<span class="sourceLineNo">622</span>        byte[] qualifier = Bytes.toBytes("q");<a name="line.622"></a>
+<span class="sourceLineNo">623</span>        for (int i = 0; i &lt; NUM_KV; ++i) {<a name="line.623"></a>
+<span class="sourceLineNo">624</span>          byte[] row = RandomKeyValueUtil.randomOrderedKey(rand, i);<a name="line.624"></a>
+<span class="sourceLineNo">625</span><a name="line.625"></a>
+<span class="sourceLineNo">626</span>          // Key will be interpreted by KeyValue.KEY_COMPARATOR<a name="line.626"></a>
+<span class="sourceLineNo">627</span>          KeyValue kv =<a name="line.627"></a>
+<span class="sourceLineNo">628</span>              new KeyValue(row, family, qualifier, EnvironmentEdgeManager.currentTime(),<a name="line.628"></a>
+<span class="sourceLineNo">629</span>                  RandomKeyValueUtil.randomValue(rand));<a name="line.629"></a>
+<span class="sourceLineNo">630</span>          byte[] k = kv.getKey();<a name="line.630"></a>
+<span class="sourceLineNo">631</span>          writer.append(kv);<a name="line.631"></a>
+<span class="sourceLineNo">632</span>          keys[i] = k;<a name="line.632"></a>
+<span class="sourceLineNo">633</span>          values[i] = CellUtil.cloneValue(kv);<a name="line.633"></a>
+<span class="sourceLineNo">634</span>          keyStrSet.add(Bytes.toStringBinary(k));<a name="line.634"></a>
+<span class="sourceLineNo">635</span>          if (i &gt; 0) {<a name="line.635"></a>
+<span class="sourceLineNo">636</span>            assertTrue((PrivateCellUtil.compare(CellComparatorImpl.COMPARATOR, kv, keys[i - 1],<a name="line.636"></a>
+<span class="sourceLineNo">637</span>                0, keys[i - 1].length)) &gt; 0);<a name="line.637"></a>
+<span class="sourceLineNo">638</span>          }<a name="line.638"></a>
+<span class="sourceLineNo">639</span>        }<a name="line.639"></a>
+<span class="sourceLineNo">640</span><a name="line.640"></a>
+<span class="sourceLineNo">641</span>        writer.close();<a name="line.641"></a>
+<span class="sourceLineNo">642</span>      }<a name="line.642"></a>
+<span class="sourceLineNo">643</span><a name="line.643"></a>
+<span class="sourceLineNo">644</span>      // Read the HFile<a name="line.644"></a>
+<span class="sourceLineNo">645</span>      HFile.Reader reader = HFile.createReader(fs, hfilePath, cacheConf, true, conf);<a name="line.645"></a>
+<span class="sourceLineNo">646</span>      assertEquals(expectedNumLevels,<a name="line.646"></a>
+<span class="sourceLineNo">647</span>          reader.getTrailer().getNumDataIndexLevels());<a name="line.647"></a>
+<span class="sourceLineNo">648</span><a name="line.648"></a>
+<span class="sourceLineNo">649</span>      assertTrue(Bytes.equals(keys[0], ((KeyValue)reader.getFirstKey().get()).getKey()));<a name="line.649"></a>
+<span class="sourceLineNo">650</span>      assertTrue(Bytes.equals(keys[NUM_KV - 1], ((KeyValue)reader.getLastKey().get()).getKey()));<a name="line.650"></a>
+<span class="sourceLineNo">651</span>      LOG.info("Last key: " + Bytes.toStringBinary(keys[NUM_KV - 1]));<a name="line.651"></a>
+<span class="sourceLineNo">652</span><a name="line.652"></a>
+<span class="sourceLineNo">653</span>      for (boolean pread : new boolean[] { false, true }) {<a name="line.653"></a>
+<span class="sourceLineNo">654</span>        HFileScanner scanner = reader.getScanner(true, pread);<a name="line.654"></a>
+<span class="sourceLineNo">655</span>        for (int i = 0; i &lt; NUM_KV; ++i) {<a name="line.655"></a>
+<span class="sourceLineNo">656</span>          checkSeekTo(keys, scanner, i);<a name="line.656"></a>
+<span class="sourceLineNo">657</span>          checkKeyValue("i=" + i, keys[i], values[i],<a name="line.657"></a>
+<span class="sourceLineNo">658</span>              ByteBuffer.wrap(((KeyValue) scanner.getKey()).getKey()), scanner.getValue());<a name="line.658"></a>
+<span class="sourceLineNo">659</span>        }<a name="line.659"></a>
+<span class="sourceLineNo">660</span>        assertTrue(scanner.seekTo());<a name="line.660"></a>
+<span class="sourceLineNo">661</span>        for (int i = NUM_KV - 1; i &gt;= 0; --i) {<a name="line.661"></a>
+<span class="sourceLineNo">662</span>          checkSeekTo(keys, scanner, i);<a name="line.662"></a>
+<span class="sourceLineNo">663</span>          checkKeyValue("i=" + i, keys[i], values[i],<a name="line.663"></a>
+<span class="sourceLineNo">664</span>              ByteBuffer.wrap(((KeyValue) scanner.getKey()).getKey()), scanner.getValue());<a name="line.664"></a>
+<span class="sourceLineNo">665</span>        }<a name="line.665"></a>
+<span class="sourceLineNo">666</span>      }<a name="line.666"></a>
+<span class="sourceLineNo">667</span><a name="line.667"></a>
+<span class="sourceLineNo">668</span>      // Manually compute the mid-key and validate it.<a name="line.668"></a>
+<span class="sourceLineNo">669</span>      HFile.Reader reader2 = reader;<a name="line.669"></a>
+<span class="sourceLineNo">670</span>      HFileBlock.FSReader fsReader = reader2.getUncachedBlockReader();<a name="line.670"></a>
+<span class="sourceLineNo">671</span><a name="line.671"></a>
+<span class="sourceLineNo">672</span>      HFileBlock.BlockIterator iter = fsReader.blockRange(0,<a name="line.672"></a>
+<span class="sourceLineNo">673</span>          reader.getTrailer().getLoadOnOpenDataOffset());<a name="line.673"></a>
+<span class="sourceLineNo">674</span>      HFileBlock block;<a name="line.674"></a>
+<span class="sourceLineNo">675</span>      List&lt;byte[]&gt; blockKeys = new ArrayList&lt;&gt;();<a name="line.675"></a>
+<span class="sourceLineNo">676</span>      while ((block = iter.nextBlock()) != null) {<a name="line.676"></a>
+<span class="sourceLineNo">677</span>        if (block.getBlockType() != BlockType.LEAF_INDEX)<a name="line.677"></a>
+<span class="sourceLineNo">678</span>          return;<a name="line.678"></a>
+<span class="sourceLineNo">679</span>        ByteBuff b = block.getBufferReadOnly();<a name="line.679"></a>
+<span class="sourceLineNo">680</span>        int n = b.getIntAfterPosition(0);<a name="line.680"></a>
+<span class="sourceLineNo">681</span>        // One int for the number of items, and n + 1 for the secondary index.<a name="line.681"></a>
+<span class="sourceLineNo">682</span>        int entriesOffset = Bytes.SIZEOF_INT * (n + 2);<a name="line.682"></a>
+<span class="sourceLineNo">683</span><a name="line.683"></a>
+<span class="sourceLineNo">684</span>        // Get all the keys from the leaf index block. S<a name="line.684"></a>
+<span class="sourceLineNo">685</span>        for (int i = 0; i &lt; n; ++i) {<a name="line.685"></a>
+<span class="sourceLineNo">686</span>          int keyRelOffset = b.getIntAfterPosition(Bytes.SIZEOF_INT * (i + 1));<a name="line.686"></a>
+<span class="sourceLineNo">687</span>          int nextKeyRelOffset = b.getIntAfterPosition(Bytes.SIZEOF_INT * (i + 2));<a name="line.687"></a>
+<span class="sourceLineNo">688</span>          int keyLen = nextKeyRelOffset - keyRelOffset;<a name="line.688"></a>
+<span class="sourceLineNo">689</span>          int keyOffset = b.arrayOffset() + entriesOffset + keyRelOffset +<a name="line.689"></a>
+<span class="sourceLineNo">690</span>              HFileBlockIndex.SECONDARY_INDEX_ENTRY_OVERHEAD;<a name="line.690"></a>
+<span class="sourceLineNo">691</span>          byte[] blockKey = Arrays.copyOfRange(b.array(), keyOffset, keyOffset<a name="line.691"></a>
+<span class="sourceLineNo">692</span>              + keyLen);<a name="line.692"></a>
+<span class="sourceLineNo">693</span>          String blockKeyStr = Bytes.toString(blockKey);<a name="line.693"></a>
+<span class="sourceLineNo">694</span>          blockKeys.add(blockKey);<a name="line.694"></a>
+<span class="sourceLineNo">695</span><a name="line.695"></a>
+<span class="sourceLineNo">696</span>          // If the first key of the block is not among the keys written, we<a name="line.696"></a>
+<span class="sourceLineNo">697</span>          // are not parsing the non-root index block format correctly.<a name="line.697"></a>
+<span class="sourceLineNo">698</span>          assertTrue("Invalid block key from leaf-level block: " + blockKeyStr,<a name="line.698"></a>
+<span class="sourceLineNo">699</span>              keyStrSet.contains(blockKeyStr));<a name="line.699"></a>
+<span class="sourceLineNo">700</span>        }<a name="line.700"></a>
+<span class="sourceLineNo">701</span>      }<a name="line.701"></a>
 <span class="sourceLineNo">702</span><a name="line.702"></a>
-<span class="sourceLineNo">703</span>          // If the first key of the block is not among the keys written, we<a name="line.703"></a>
-<span class="sourceLineNo">704</span>          // are not parsing the non-root index block format correctly.<a name="line.704"></a>
-<span class="sourceLineNo">705</span>          assertTrue("Invalid block key from leaf-level block: " + blockKeyStr,<a name="line.705"></a>
-<span class="sourceLineNo">706</span>              keyStrSet.contains(blockKeyStr));<a name="line.706"></a>
-<span class="sourceLineNo">707</span>        }<a name="line.707"></a>
-<span class="sourceLineNo">708</span>      }<a name="line.708"></a>
-<span class="sourceLineNo">709</span><a name="line.709"></a>
-<span class="sourceLineNo">710</span>      // Validate the mid-key.<a name="line.710"></a>
-<span class="sourceLineNo">711</span>      assertEquals(<a name="line.711"></a>
-<span class="sourceLineNo">712</span>          Bytes.toStringBinary(blockKeys.get((blockKeys.size() - 1) / 2)),<a name="line.712"></a>
-<span class="sourceLineNo">713</span>          reader.midKey());<a name="line.713"></a>
-<span class="sourceLineNo">714</span><a name="line.714"></a>
-<span class="sourceLineNo">715</span>      assertEquals(UNCOMPRESSED_INDEX_SIZES[testI],<a name="line.715"></a>
-<span class="sourceLineNo">716</span>          reader.getTrailer().getUncompressedDataIndexSize());<a name="line.716"></a>
-<span class="sourceLineNo">717</span><a name="line.717"></a>
-<span class="sourceLineNo">718</span>      reader.close();<a name="line.718"></a>
-<span class="sourceLineNo">719</span>      reader2.close();<a name="line.719"></a>
-<span class="sourceLineNo">720</span>    }<a name="line.720"></a>
-<span class="sourceLineNo">721</span>  }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>  private void checkSeekTo(byte[][] keys, HFileScanner scanner, int i)<a name="line.723"></a>
-<span class="sourceLineNo">724</span>      throws IOException {<a name="line.724"></a>
-<span class="sourceLineNo">725</span>    assertEquals("Failed to seek to key #" + i + " (" + Bytes.toStringBinary(keys[i]) + ")", 0,<a name="line.725"></a>
-<span class="sourceLineNo">726</span>        scanner.seekTo(KeyValueUtil.createKeyValueFromKey(keys[i])));<a name="line.726"></a>
+<span class="sourceLineNo">703</span>      // Validate the mid-key.<a name="line.703"></a>
+<span class="sourceLineNo">704</span>      assertEquals(<a name="line.704"></a>
+<span class="sourceLineNo">705</span>          Bytes.toStringBinary(blockKeys.get((blockKeys.size() - 1) / 2)),<a name="line.705"></a>
+<span class="sourceLineNo">706</span>          reader.midKey());<a name="line.706"></a>
+<span class="sourceLineNo">707</span><a name="line.707"></a>
+<span class="sourceLineNo">708</span>      assertEquals(UNCOMPRESSED_INDEX_SIZES[testI],<a name="line.708"></a>
+<span class="sourceLineNo">709</span>          reader.getTrailer().getUncompressedDataIndexSize());<a name="line.709"></a>
+<span class="sourceLineNo">710</span><a name="line.710"></a>
+<span class="sourceLineNo">711</span>      reader.close();<a name="line.711"></a>
+<span class="sourceLineNo">712</span>      reader2.close();<a name="line.712"></a>
+<span class="sourceLineNo">713</span>    }<a name="line.713"></a>
+<span class="sourceLineNo">714</span>  }<a name="line.714"></a>
+<span class="sourceLineNo">715</span><a name="line.715"></a>
+<span class="sourceLineNo">716</span>  private void checkSeekTo(byte[][] keys, HFileScanner scanner, int i)<a name="line.716"></a>
+<span class="sourceLineNo">717</span>      throws IOException {<a name="line.717"></a>
+<span class="sourceLineNo">718</span>    assertEquals("Failed to seek to key #" + i + " (" + Bytes.toStringBinary(keys[i]) + ")", 0,<a name="line.718"></a>
+<span class="sourceLineNo">719</span>        scanner.seekTo(KeyValueUtil.createKeyValueFromKey(keys[i])));<a name="line.719"></a>
+<span class="sourceLineNo">720</span>  }<a name="line.720"></a>
+<span class="sourceLineNo">721</span><a name="line.721"></a>
+<span class="sourceLineNo">722</span>  private void assertArrayEqualsBuffer(String msgPrefix, byte[] arr,<a name="line.722"></a>
+<span class="sourceLineNo">723</span>      ByteBuffer buf) {<a name="line.723"></a>
+<span class="sourceLineNo">724</span>    assertEquals(msgPrefix + ": expected " + Bytes.toStringBinary(arr)<a name="line.724"></a>
+<span class="sourceLineNo">725</span>        + ", actual " + Bytes.toStringBinary(buf), 0, Bytes.compareTo(arr, 0,<a name="line.725"></a>
+<span class="sourceLineNo">726</span>        arr.length, buf.array(), buf.arrayOffset(), buf.limit()));<a name="line.726"></a>
 <span class="sourceLineNo">727</span>  }<a name="line.727"></a>
 <span class="sourceLineNo">728</span><a name="line.728"></a>
-<span class="sourceLineNo">729</span>  private void assertArrayEqualsBuffer(String msgPrefix, byte[] arr,<a name="line.729"></a>
-<span class="sourceLineNo">730</span>      ByteBuffer buf) {<a name="line.730"></a>
-<span class="sourceLineNo">731</span>    assertEquals(msgPrefix + ": expected " + Bytes.toStringBinary(arr)<a name="line.731"></a>
-<span class="sourceLineNo">732</span>        + ", actual " + Bytes.toStringBinary(buf), 0, Bytes.compareTo(arr, 0,<a name="line.732"></a>
-<span class="sourceLineNo">733</span>        arr.length, buf.array(), buf.arrayOffset(), buf.limit()));<a name="line.733"></a>
-<span class="sourceLineNo">734</span>  }<a name="line.734"></a>
-<span class="sourceLineNo">735</span><a name="line.735"></a>
-<span class="sourceLineNo">736</span>  /** Check a key/value pair after it was read by the reader */<a name="line.736"></a>
-<span class="sourceLineNo">737</span>  private void checkKeyValue(String msgPrefix, byte[] expectedKey,<a name="line.737"></a>
-<span class="sourceLineNo">738</span>      byte[] expectedValue, ByteBuffer keyRead, ByteBuffer valueRead) {<a name="line.738"></a>
-<span class="sourceLineNo">739</span>    if (!msgPrefix.isEmpty())<a name="line.739"></a>
-<span class="sourceLineNo">740</span>      msgPrefix += ". ";<a name="line.740"></a>
-<span class="sourceLineNo">741</span><a name="line.741"></a>
-<span class="sourceLineNo">742</span>    assertArrayEqualsBuffer(msgPrefix + "Invalid key", expectedKey, keyRead);<a name="line.742"></a>
-<span class="sourceLineNo">743</span>    assertArrayEqualsBuffer(msgPrefix + "Invalid value", expectedValue,<a name="line.743"></a>
-<span class="sourceLineNo">744</span>        valueRead);<a name="line.744"></a>
-<span class="sourceLineNo">745</span>  }<a name="line.745"></a>
-<span class="sourceLineNo">746</span><a name="line.746"></a>
-<span class="sourceLineNo">747</span>  @Test<a name="line.747"></a>
-<span class="sourceLineNo">748</span>  public void testIntermediateLevelIndicesWithLargeKeys() throws IOException {<a name="line.748"></a>
-<span class="sourceLineNo">749</span>    testIntermediateLevelIndicesWithLargeKeys(16);<a name="line.749"></a>
-<span class="sourceLineNo">750</span>  }<a name="line.750"></a>
-<span class="sourceLineNo">751</span><a name="line.751"></a>
-<span class="sourceLineNo">752</span>  @Test<a name="line.752"></a>
-<span class="sourceLineNo">753</span>  public void testIntermediateLevelIndicesWithLargeKeysWithMinNumEntries() throws IOException {<a name="line.753"></a>
-<span class="sourceLineNo">754</span>    // because of the large rowKeys, we will end up with a 50-level block index without sanity check<a name="line.754"></a>
-<span class="sourceLineNo">755</span>    testIntermediateLevelIndicesWithLargeKeys(2);<a name="line.755"></a>
-<span class="sourceLineNo">756</span>  }<a name="line.756"></a>
-<span class="sourceLineNo">757</span><a name="line.757"></a>
-<span class="sourceLineNo">758</span>  public void testIntermediateLevelIndicesWithLargeKeys(int minNumEntries) throws IOException {<a name="line.758"></a>
-<span class="sourceLineNo">759</span>    Path hfPath = new Path(TEST_UTIL.getDataTestDir(),<a name="line.759"></a>
-<span class="sourceLineNo">760</span>      "testIntermediateLevelIndicesWithLargeKeys.hfile");<a name="line.760"></a>
-<span class="sourceLineNo">761</span>    int maxChunkSize = 1024;<a name="line.761"></a>
-<span class="sourceLineNo">762</span>    FileSystem fs = FileSystem.get(conf);<a name="line.762"></a>
-<span class="sourceLineNo">763</span>    CacheConfig cacheConf = new CacheConfig(conf);<a name="line.763"></a>
-<span class="sourceLineNo">764</span>    conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, maxChunkSize);<a name="line.764"></a>
-<span class="sourceLineNo">765</span>    conf.setInt(HFileBlockIndex.MIN_INDEX_NUM_ENTRIES_KEY, minNumEntries);<a name="line.765"></a>
-<span class="sourceLineNo">766</span>    HFileContext context = new HFileContextBuilder().withBlockSize(16).build();<a name="line.766"></a>
-<span class="sourceLineNo">767</span>    HFile.Writer hfw = new HFile.WriterFactory(conf, cacheConf)<a name="line.767"></a>
-<span class="sourceLineNo">768</span>            .withFileContext(context)<a name="line.768"></a>
-<span class="sourceLineNo">769</span>            .withPath(fs, hfPath).create();<a name="line.769"></a>
-<span class="sourceLineNo">770</span>    List&lt;byte[]&gt; keys = new ArrayList&lt;&gt;();<a name="line.770"></a>
-<span class="sourceLineNo">771</span><a name="line.771"></a>
-<span class="sourceLineNo">772</span>    // This should result in leaf-level indices and a root level index<a name="line.772"></a>
-<span class="sourceLineNo">773</span>    for (int i=0; i &lt; 100; i++) {<a name="line.773"></a>
-<span class="sourceLineNo">774</span>      byte[] rowkey = new byte[maxChunkSize + 1];<a name="line.774"></a>
-<span class="sourceLineNo">775</span>      byte[] b = Bytes.toBytes(i);<a name="line.775"></a>
-<span class="sourceLineNo">776</span>      System.arraycopy(b, 0, rowkey, rowkey.length - b.length, b.length);<a name="line.776"></a>
-<span class="sourceLineNo">777</span>      keys.add(rowkey);<a name="line.777"></a>
-<span class="sourceLineNo">778</span>      hfw.append(CellUtil.createCell(rowkey));<a name="line.778"></a>
-<span class="sourceLineNo">779</span>    }<a name="line.779"></a>
-<span class="sourceLineNo">780</span>    hfw.close();<a name="line.780"></a>
-<span class="sourceLineNo">781</span><a name="line.781"></a>
-<span class="sourceLineNo">782</span>    HFile.Reader reader = HFile.createReader(fs, hfPath, cacheConf, true, conf);<a name="line.782"></a>
-<span class="sourceLineNo">783</span>    // Scanner doesn't do Cells yet.  Fix.<a name="line.783"></a>
-<span class="sourceLineNo">784</span>    HFileScanner scanner = reader.getScanner(true, true);<a name="line.784"></a>
-<span class="sourceLineNo">785</span>    for (int i = 0; i &lt; keys.size(); ++i) {<a name="line.785"></a>
-<span class="sourceLineNo">786</span>      scanner.seekTo(CellUtil.createCell(keys.get(i)));<a name="line.786"></a>
-<span class="sourceLineNo">787</span>    }<a name="line.787"></a>
-<span class="sourceLineNo">788</span>    reader.close();<a name="line.788"></a>
-<span class="sourceLineNo">789</span>  }<a name="line.789"></a>
-<span class="sourceLineNo">790</span>}<a name="line.790"></a>
-<span class="sourceLineNo">791</span><a name="line.791"></a>
+<span class="sourceLineNo">729</span>  /** Check a key/value pair after it was read by the reader */<a name="line.729"></a>
+<span class="sourceLineNo">730</span>  private void checkKeyValue(String msgPrefix, byte[] expectedKey,<a name="line.730"></a>
+<span class="sourceLineNo">731</span>      byte[] expectedValue, ByteBuffer keyRead, ByteBuffer valueRead) {<a name="line.731"></a>
+<span class="sourceLineNo">732</span>    if (!msgPrefix.isEmpty())<a name="line.732"></a>
+<span class="sourceLineNo">733</span>      msgPrefix += ". ";<a name="line.733"></a>
+<span class="sourceLineNo">734</span><a name="line.734"></a>
+<span class="sourceLineNo">735</span>    assertArrayEqualsBuffer(msgPrefix + "Invalid key", expectedKey, keyRead);<a name="line.735"></a>
+<span class="sourceLineNo">736</span>    assertArrayEqualsBuffer(msgPrefix + "Invalid value", expectedValue,<a name="line.736"></a>
+<span class="sourceLineNo">737</span>        valueRead);<a name="line.737"></a>
+<span class="sourceLineNo">738</span>  }<a name="line.738"></a>
+<span class="sourceLineNo">739</span><a name="line.739"></a>
+<span class="sourceLineNo">740</span>  @Test<a name="line.740"></a>
+<span class="sourceLineNo">741</span>  public void testIntermediateLevelIndicesWithLargeKeys() throws IOException {<a name="line.741"></a>
+<span class="sourceLineNo">742</span>    testIntermediateLevelIndicesWithLargeKeys(16);<a name="line.742"></a>
+<span class="sourceLineNo">743</span>  }<a name="line.743"></a>
+<span class="sourceLineNo">744</span><a name="line.744"></a>
+<span class="sourceLineNo">745</span>  @Test<a name="line.745"></a>
+<span class="sourceLineNo">746</span>  public void testIntermediateLevelIndicesWithLargeKeysWithMinNumEntries() throws IOException {<a name="line.746"></a>
+<span class="sourceLineNo">747</span>    // because of the large rowKeys, we will end up with a 50-level block index without sanity check<a name="line.747"></a>
+<span class="sourceLineNo">748</span>    testIntermediateLevelIndicesWithLargeKeys(2);<a name="line.748"></a>
+<span class="sourceLineNo">749</span>  }<a name="line.749"></a>
+<span class="sourceLineNo">750</span><a name="line.750"></a>
+<span class="sourceLineNo">751</span>  public void testIntermediateLevelIndicesWithLargeKeys(int minNumEntries) throws IOException {<a name="line.751"></a>
+<span class="sourceLineNo">752</span>    Path hfPath = new Path(TEST_UTIL.getDataTestDir(),<a name="line.752"></a>
+<span class="sourceLineNo">753</span>      "testIntermediateLevelIndicesWithLargeKeys.hfile");<a name="line.753"></a>
+<span class="sourceLineNo">754</span>    int maxChunkSize = 1024;<a name="line.754"></a>
+<span class="sourceLineNo">755</span>    FileSystem fs = FileSystem.get(conf);<a name="line.755"></a>
+<span class="sourceLineNo">756</span>    CacheConfig cacheConf = new CacheConfig(conf);<a name="line.756"></a>
+<span class="sourceLineNo">757</span>    conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, maxChunkSize);<a name="line.757"></a>
+<span class="sourceLineNo">758</span>    conf.setInt(HFileBlockIndex.MIN_INDEX_NUM_ENTRIES_KEY, minNumEntries);<a name="line.758"></a>
+<span class="sourceLineNo">759</span>    HFileContext context = new HFileContextBuilder().withBlockSize(16).build();<a name="line.759"></a>
+<span class="sourceLineNo">760</span>    HFile.Writer hfw = new HFile.WriterFactory(conf, cacheConf)<a name="line.760"></a>
+<span class="sourceLineNo">761</span>            .withFileContext(context)<a name="line.761"></a>
+<span class="sourceLineNo">762</span>            .withPath(fs, hfPath).create();<a name="line.762"></a>
+<span class="sourceLineNo">763</span>    List&lt;byte[]&gt; keys = new ArrayList&lt;&gt;();<a name="line.763"></a>
+<span class="sourceLineNo">764</span><a name="line.764"></a>
+<span class="sourceLineNo">765</span>    // This should result in leaf-level indices and a root level index<a name="line.765"></a>
+<span class="sourceLineNo">766</span>    for (int i=0; i &lt; 100; i++) {<a name="line.766"></a>
+<span class="sourceLineNo">767</span>      byte[] rowkey = new byte[maxChunkSize + 1];<a name="line.767"></a>
+<span class="sourceLineNo">768</span>      byte[] b = Bytes.toBytes(i);<a name="line.768"></a>
+<span class="sourceLineNo">769</span>      System.arraycopy(b, 0, rowkey, rowkey.length - b.length, b.length);<a name="line.769"></a>
+<span class="sourceLineNo">770</span>      keys.add(rowkey);<a name="line.770"></a>
+<span class="sourceLineNo">771</span>      hfw.append(CellUtil.createCell(rowkey));<a name="line.771"></a>
+<span class="sourceLineNo">772</span>    }<a name="line.772"></a>
+<span class="sourceLineNo">773</span>    hfw.close();<a name="line.773"></a>
+<span class="sourceLineNo">774</span><a name="line.774"></a>
+<span class="sourceLineNo">775</span>    HFile.Reader reader = HFile.createReader(fs, hfPath, cacheConf, true, conf);<a name="line.775"></a>
+<span class="sourceLineNo">776</span>    // Scanner doesn't do Cells yet.  Fix.<a name="line.776"></a>
+<span class="sourceLineNo">777</span>    HFileScanner scanner = reader.getScanner(true, true);<a name="line.777"></a>
+<span class="sourceLineNo">778</span>    for (int i = 0; i &lt; keys.size(); ++i) {<a name="line.778"></a>
+<span class="sourceLineNo">779</span>      scanner.seekTo(CellUtil.createCell(keys.get(i)));<a name="line.779"></a>
+<span class="sourceLineNo">780</span>    }<a name="line.780"></a>
+<span class="sourceLineNo">781</span>    reader.close();<a name="line.781"></a>
+<span class="sourceLineNo">782</span>  }<a name="line.782"></a>
+<span class="sourceLineNo">783</span>}<a name="line.783"></a>
+<span class="sourceLineNo">784</span><a name="line.784"></a>
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestPrefetch.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestPrefetch.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestPrefetch.html
index 415d5dd..c481a6d 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestPrefetch.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestPrefetch.html
@@ -72,122 +72,123 @@
 <span class="sourceLineNo">064</span>    conf.setBoolean(CacheConfig.PREFETCH_BLOCKS_ON_OPEN_KEY, true);<a name="line.64"></a>
 <span class="sourceLineNo">065</span>    fs = HFileSystem.get(conf);<a name="line.65"></a>
 <span class="sourceLineNo">066</span>    CacheConfig.blockCacheDisabled = false;<a name="line.66"></a>
-<span class="sourceLineNo">067</span>    cacheConf = new CacheConfig(conf);<a name="line.67"></a>
-<span class="sourceLineNo">068</span>  }<a name="line.68"></a>
-<span class="sourceLineNo">069</span><a name="line.69"></a>
-<span class="sourceLineNo">070</span>  @Test<a name="line.70"></a>
-<span class="sourceLineNo">071</span>  public void testPrefetchSetInHCDWorks() {<a name="line.71"></a>
-<span class="sourceLineNo">072</span>    HColumnDescriptor hcd = new HColumnDescriptor(Bytes.toBytes("f"));<a name="line.72"></a>
-<span class="sourceLineNo">073</span>    hcd.setPrefetchBlocksOnOpen(true);<a name="line.73"></a>
-<span class="sourceLineNo">074</span>    Configuration c = HBaseConfiguration.create();<a name="line.74"></a>
-<span class="sourceLineNo">075</span>    assertFalse(c.getBoolean(CacheConfig.PREFETCH_BLOCKS_ON_OPEN_KEY, false));<a name="line.75"></a>
-<span class="sourceLineNo">076</span>    CacheConfig cc = new CacheConfig(c, hcd);<a name="line.76"></a>
-<span class="sourceLineNo">077</span>    assertTrue(cc.shouldPrefetchOnOpen());<a name="line.77"></a>
-<span class="sourceLineNo">078</span>  }<a name="line.78"></a>
-<span class="sourceLineNo">079</span><a name="line.79"></a>
-<span class="sourceLineNo">080</span>  @Test<a name="line.80"></a>
-<span class="sourceLineNo">081</span>  public void testPrefetch() throws Exception {<a name="line.81"></a>
-<span class="sourceLineNo">082</span>    Path storeFile = writeStoreFile("TestPrefetch");<a name="line.82"></a>
-<span class="sourceLineNo">083</span>    readStoreFile(storeFile);<a name="line.83"></a>
-<span class="sourceLineNo">084</span>  }<a name="line.84"></a>
-<span class="sourceLineNo">085</span><a name="line.85"></a>
-<span class="sourceLineNo">086</span>  @Test<a name="line.86"></a>
-<span class="sourceLineNo">087</span>  public void testPrefetchRace() throws Exception {<a name="line.87"></a>
-<span class="sourceLineNo">088</span>    for (int i = 0; i &lt; 10; i++) {<a name="line.88"></a>
-<span class="sourceLineNo">089</span>      Path storeFile = writeStoreFile("TestPrefetchRace-" + i);<a name="line.89"></a>
-<span class="sourceLineNo">090</span>      readStoreFileLikeScanner(storeFile);<a name="line.90"></a>
-<span class="sourceLineNo">091</span>    }<a name="line.91"></a>
-<span class="sourceLineNo">092</span>  }<a name="line.92"></a>
-<span class="sourceLineNo">093</span><a name="line.93"></a>
-<span class="sourceLineNo">094</span>  /**<a name="line.94"></a>
-<span class="sourceLineNo">095</span>   * Read a storefile in the same manner as a scanner -- using non-positional reads and<a name="line.95"></a>
-<span class="sourceLineNo">096</span>   * without waiting for prefetch to complete.<a name="line.96"></a>
-<span class="sourceLineNo">097</span>   */<a name="line.97"></a>
-<span class="sourceLineNo">098</span>  private void readStoreFileLikeScanner(Path storeFilePath) throws Exception {<a name="line.98"></a>
-<span class="sourceLineNo">099</span>    // Open the file<a name="line.99"></a>
-<span class="sourceLineNo">100</span>    HFile.Reader reader = HFile.createReader(fs, storeFilePath, cacheConf, true, conf);<a name="line.100"></a>
-<span class="sourceLineNo">101</span>    do {<a name="line.101"></a>
-<span class="sourceLineNo">102</span>      long offset = 0;<a name="line.102"></a>
-<span class="sourceLineNo">103</span>      while (offset &lt; reader.getTrailer().getLoadOnOpenDataOffset()) {<a name="line.103"></a>
-<span class="sourceLineNo">104</span>        HFileBlock block = reader.readBlock(offset, -1, false, /*pread=*/false,<a name="line.104"></a>
-<span class="sourceLineNo">105</span>            false, true, null, null);<a name="line.105"></a>
-<span class="sourceLineNo">106</span>        offset += block.getOnDiskSizeWithHeader();<a name="line.106"></a>
-<span class="sourceLineNo">107</span>      }<a name="line.107"></a>
-<span class="sourceLineNo">108</span>    } while (!reader.prefetchComplete());<a name="line.108"></a>
-<span class="sourceLineNo">109</span>  }<a name="line.109"></a>
-<span class="sourceLineNo">110</span><a name="line.110"></a>
-<span class="sourceLineNo">111</span>  private void readStoreFile(Path storeFilePath) throws Exception {<a name="line.111"></a>
-<span class="sourceLineNo">112</span>    // Open the file<a name="line.112"></a>
-<span class="sourceLineNo">113</span>    HFile.Reader reader = HFile.createReader(fs, storeFilePath, cacheConf, true, conf);<a name="line.113"></a>
-<span class="sourceLineNo">114</span><a name="line.114"></a>
-<span class="sourceLineNo">115</span>    while (!reader.prefetchComplete()) {<a name="line.115"></a>
-<span class="sourceLineNo">116</span>      // Sleep for a bit<a name="line.116"></a>
-<span class="sourceLineNo">117</span>      Thread.sleep(1000);<a name="line.117"></a>
-<span class="sourceLineNo">118</span>    }<a name="line.118"></a>
-<span class="sourceLineNo">119</span><a name="line.119"></a>
-<span class="sourceLineNo">120</span>    // Check that all of the data blocks were preloaded<a name="line.120"></a>
-<span class="sourceLineNo">121</span>    BlockCache blockCache = cacheConf.getBlockCache();<a name="line.121"></a>
-<span class="sourceLineNo">122</span>    long offset = 0;<a name="line.122"></a>
-<span class="sourceLineNo">123</span>    while (offset &lt; reader.getTrailer().getLoadOnOpenDataOffset()) {<a name="line.123"></a>
-<span class="sourceLineNo">124</span>      HFileBlock block = reader.readBlock(offset, -1, false, true, false, true, null, null);<a name="line.124"></a>
-<span class="sourceLineNo">125</span>      BlockCacheKey blockCacheKey = new BlockCacheKey(reader.getName(), offset);<a name="line.125"></a>
-<span class="sourceLineNo">126</span>      boolean isCached = blockCache.getBlock(blockCacheKey, true, false, true) != null;<a name="line.126"></a>
-<span class="sourceLineNo">127</span>      if (block.getBlockType() == BlockType.DATA ||<a name="line.127"></a>
-<span class="sourceLineNo">128</span>          block.getBlockType() == BlockType.ROOT_INDEX ||<a name="line.128"></a>
-<span class="sourceLineNo">129</span>          block.getBlockType() == BlockType.INTERMEDIATE_INDEX) {<a name="line.129"></a>
-<span class="sourceLineNo">130</span>        assertTrue(isCached);<a name="line.130"></a>
-<span class="sourceLineNo">131</span>      }<a name="line.131"></a>
-<span class="sourceLineNo">132</span>      offset += block.getOnDiskSizeWithHeader();<a name="line.132"></a>
-<span class="sourceLineNo">133</span>    }<a name="line.133"></a>
-<span class="sourceLineNo">134</span>  }<a name="line.134"></a>
-<span class="sourceLineNo">135</span><a name="line.135"></a>
-<span class="sourceLineNo">136</span>  private Path writeStoreFile(String fname) throws IOException {<a name="line.136"></a>
-<span class="sourceLineNo">137</span>    Path storeFileParentDir = new Path(TEST_UTIL.getDataTestDir(), fname);<a name="line.137"></a>
-<span class="sourceLineNo">138</span>    HFileContext meta = new HFileContextBuilder()<a name="line.138"></a>
-<span class="sourceLineNo">139</span>      .withBlockSize(DATA_BLOCK_SIZE)<a name="line.139"></a>
-<span class="sourceLineNo">140</span>      .build();<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    StoreFileWriter sfw = new StoreFileWriter.Builder(conf, cacheConf, fs)<a name="line.141"></a>
-<span class="sourceLineNo">142</span>      .withOutputDir(storeFileParentDir)<a name="line.142"></a>
-<span class="sourceLineNo">143</span>      .withComparator(CellComparatorImpl.COMPARATOR)<a name="line.143"></a>
-<span class="sourceLineNo">144</span>      .withFileContext(meta)<a name="line.144"></a>
-<span class="sourceLineNo">145</span>      .build();<a name="line.145"></a>
-<span class="sourceLineNo">146</span><a name="line.146"></a>
-<span class="sourceLineNo">147</span>    final int rowLen = 32;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>    for (int i = 0; i &lt; NUM_KV; ++i) {<a name="line.148"></a>
-<span class="sourceLineNo">149</span>      byte[] k = RandomKeyValueUtil.randomOrderedKey(RNG, i);<a name="line.149"></a>
-<span class="sourceLineNo">150</span>      byte[] v = RandomKeyValueUtil.randomValue(RNG);<a name="line.150"></a>
-<span class="sourceLineNo">151</span>      int cfLen = RNG.nextInt(k.length - rowLen + 1);<a name="line.151"></a>
-<span class="sourceLineNo">152</span>      KeyValue kv = new KeyValue(<a name="line.152"></a>
-<span class="sourceLineNo">153</span>          k, 0, rowLen,<a name="line.153"></a>
-<span class="sourceLineNo">154</span>          k, rowLen, cfLen,<a name="line.154"></a>
-<span class="sourceLineNo">155</span>          k, rowLen + cfLen, k.length - rowLen - cfLen,<a name="line.155"></a>
-<span class="sourceLineNo">156</span>          RNG.nextLong(),<a name="line.156"></a>
-<span class="sourceLineNo">157</span>          generateKeyType(RNG),<a name="line.157"></a>
-<span class="sourceLineNo">158</span>          v, 0, v.length);<a name="line.158"></a>
-<span class="sourceLineNo">159</span>      sfw.append(kv);<a name="line.159"></a>
-<span class="sourceLineNo">160</span>    }<a name="line.160"></a>
-<span class="sourceLineNo">161</span><a name="line.161"></a>
-<span class="sourceLineNo">162</span>    sfw.close();<a name="line.162"></a>
-<span class="sourceLineNo">163</span>    return sfw.getPath();<a name="line.163"></a>
-<span class="sourceLineNo">164</span>  }<a name="line.164"></a>
-<span class="sourceLineNo">165</span><a name="line.165"></a>
-<span class="sourceLineNo">166</span>  public static KeyValue.Type generateKeyType(Random rand) {<a name="line.166"></a>
-<span class="sourceLineNo">167</span>    if (rand.nextBoolean()) {<a name="line.167"></a>
-<span class="sourceLineNo">168</span>      // Let's make half of KVs puts.<a name="line.168"></a>
-<span class="sourceLineNo">169</span>      return KeyValue.Type.Put;<a name="line.169"></a>
-<span class="sourceLineNo">170</span>    } else {<a name="line.170"></a>
-<span class="sourceLineNo">171</span>      KeyValue.Type keyType =<a name="line.171"></a>
-<span class="sourceLineNo">172</span>          KeyValue.Type.values()[1 + rand.nextInt(NUM_VALID_KEY_TYPES)];<a name="line.172"></a>
-<span class="sourceLineNo">173</span>      if (keyType == KeyValue.Type.Minimum || keyType == KeyValue.Type.Maximum)<a name="line.173"></a>
-<span class="sourceLineNo">174</span>      {<a name="line.174"></a>
-<span class="sourceLineNo">175</span>        throw new RuntimeException("Generated an invalid key type: " + keyType<a name="line.175"></a>
-<span class="sourceLineNo">176</span>            + ". " + "Probably the layout of KeyValue.Type has changed.");<a name="line.176"></a>
-<span class="sourceLineNo">177</span>      }<a name="line.177"></a>
-<span class="sourceLineNo">178</span>      return keyType;<a name="line.178"></a>
-<span class="sourceLineNo">179</span>    }<a name="line.179"></a>
-<span class="sourceLineNo">180</span>  }<a name="line.180"></a>
-<span class="sourceLineNo">181</span><a name="line.181"></a>
-<span class="sourceLineNo">182</span>}<a name="line.182"></a>
+<span class="sourceLineNo">067</span>    CacheConfig.instantiateBlockCache(conf);<a name="line.67"></a>
+<span class="sourceLineNo">068</span>    cacheConf = new CacheConfig(conf);<a name="line.68"></a>
+<span class="sourceLineNo">069</span>  }<a name="line.69"></a>
+<span class="sourceLineNo">070</span><a name="line.70"></a>
+<span class="sourceLineNo">071</span>  @Test<a name="line.71"></a>
+<span class="sourceLineNo">072</span>  public void testPrefetchSetInHCDWorks() {<a name="line.72"></a>
+<span class="sourceLineNo">073</span>    HColumnDescriptor hcd = new HColumnDescriptor(Bytes.toBytes("f"));<a name="line.73"></a>
+<span class="sourceLineNo">074</span>    hcd.setPrefetchBlocksOnOpen(true);<a name="line.74"></a>
+<span class="sourceLineNo">075</span>    Configuration c = HBaseConfiguration.create();<a name="line.75"></a>
+<span class="sourceLineNo">076</span>    assertFalse(c.getBoolean(CacheConfig.PREFETCH_BLOCKS_ON_OPEN_KEY, false));<a name="line.76"></a>
+<span class="sourceLineNo">077</span>    CacheConfig cc = new CacheConfig(c, hcd);<a name="line.77"></a>
+<span class="sourceLineNo">078</span>    assertTrue(cc.shouldPrefetchOnOpen());<a name="line.78"></a>
+<span class="sourceLineNo">079</span>  }<a name="line.79"></a>
+<span class="sourceLineNo">080</span><a name="line.80"></a>
+<span class="sourceLineNo">081</span>  @Test<a name="line.81"></a>
+<span class="sourceLineNo">082</span>  public void testPrefetch() throws Exception {<a name="line.82"></a>
+<span class="sourceLineNo">083</span>    Path storeFile = writeStoreFile("TestPrefetch");<a name="line.83"></a>
+<span class="sourceLineNo">084</span>    readStoreFile(storeFile);<a name="line.84"></a>
+<span class="sourceLineNo">085</span>  }<a name="line.85"></a>
+<span class="sourceLineNo">086</span><a name="line.86"></a>
+<span class="sourceLineNo">087</span>  @Test<a name="line.87"></a>
+<span class="sourceLineNo">088</span>  public void testPrefetchRace() throws Exception {<a name="line.88"></a>
+<span class="sourceLineNo">089</span>    for (int i = 0; i &lt; 10; i++) {<a name="line.89"></a>
+<span class="sourceLineNo">090</span>      Path storeFile = writeStoreFile("TestPrefetchRace-" + i);<a name="line.90"></a>
+<span class="sourceLineNo">091</span>      readStoreFileLikeScanner(storeFile);<a name="line.91"></a>
+<span class="sourceLineNo">092</span>    }<a name="line.92"></a>
+<span class="sourceLineNo">093</span>  }<a name="line.93"></a>
+<span class="sourceLineNo">094</span><a name="line.94"></a>
+<span class="sourceLineNo">095</span>  /**<a name="line.95"></a>
+<span class="sourceLineNo">096</span>   * Read a storefile in the same manner as a scanner -- using non-positional reads and<a name="line.96"></a>
+<span class="sourceLineNo">097</span>   * without waiting for prefetch to complete.<a name="line.97"></a>
+<span class="sourceLineNo">098</span>   */<a name="line.98"></a>
+<span class="sourceLineNo">099</span>  private void readStoreFileLikeScanner(Path storeFilePath) throws Exception {<a name="line.99"></a>
+<span class="sourceLineNo">100</span>    // Open the file<a name="line.100"></a>
+<span class="sourceLineNo">101</span>    HFile.Reader reader = HFile.createReader(fs, storeFilePath, cacheConf, true, conf);<a name="line.101"></a>
+<span class="sourceLineNo">102</span>    do {<a name="line.102"></a>
+<span class="sourceLineNo">103</span>      long offset = 0;<a name="line.103"></a>
+<span class="sourceLineNo">104</span>      while (offset &lt; reader.getTrailer().getLoadOnOpenDataOffset()) {<a name="line.104"></a>
+<span class="sourceLineNo">105</span>        HFileBlock block = reader.readBlock(offset, -1, false, /*pread=*/false,<a name="line.105"></a>
+<span class="sourceLineNo">106</span>            false, true, null, null);<a name="line.106"></a>
+<span class="sourceLineNo">107</span>        offset += block.getOnDiskSizeWithHeader();<a name="line.107"></a>
+<span class="sourceLineNo">108</span>      }<a name="line.108"></a>
+<span class="sourceLineNo">109</span>    } while (!reader.prefetchComplete());<a name="line.109"></a>
+<span class="sourceLineNo">110</span>  }<a name="line.110"></a>
+<span class="sourceLineNo">111</span><a name="line.111"></a>
+<span class="sourceLineNo">112</span>  private void readStoreFile(Path storeFilePath) throws Exception {<a name="line.112"></a>
+<span class="sourceLineNo">113</span>    // Open the file<a name="line.113"></a>
+<span class="sourceLineNo">114</span>    HFile.Reader reader = HFile.createReader(fs, storeFilePath, cacheConf, true, conf);<a name="line.114"></a>
+<span class="sourceLineNo">115</span><a name="line.115"></a>
+<span class="sourceLineNo">116</span>    while (!reader.prefetchComplete()) {<a name="line.116"></a>
+<span class="sourceLineNo">117</span>      // Sleep for a bit<a name="line.117"></a>
+<span class="sourceLineNo">118</span>      Thread.sleep(1000);<a name="line.118"></a>
+<span class="sourceLineNo">119</span>    }<a name="line.119"></a>
+<span class="sourceLineNo">120</span><a name="line.120"></a>
+<span class="sourceLineNo">121</span>    // Check that all of the data blocks were preloaded<a name="line.121"></a>
+<span class="sourceLineNo">122</span>    BlockCache blockCache = cacheConf.getBlockCache();<a name="line.122"></a>
+<span class="sourceLineNo">123</span>    long offset = 0;<a name="line.123"></a>
+<span class="sourceLineNo">124</span>    while (offset &lt; reader.getTrailer().getLoadOnOpenDataOffset()) {<a name="line.124"></a>
+<span class="sourceLineNo">125</span>      HFileBlock block = reader.readBlock(offset, -1, false, true, false, true, null, null);<a name="line.125"></a>
+<span class="sourceLineNo">126</span>      BlockCacheKey blockCacheKey = new BlockCacheKey(reader.getName(), offset);<a name="line.126"></a>
+<span class="sourceLineNo">127</span>      boolean isCached = blockCache.getBlock(blockCacheKey, true, false, true) != null;<a name="line.127"></a>
+<span class="sourceLineNo">128</span>      if (block.getBlockType() == BlockType.DATA ||<a name="line.128"></a>
+<span class="sourceLineNo">129</span>          block.getBlockType() == BlockType.ROOT_INDEX ||<a name="line.129"></a>
+<span class="sourceLineNo">130</span>          block.getBlockType() == BlockType.INTERMEDIATE_INDEX) {<a name="line.130"></a>
+<span class="sourceLineNo">131</span>        assertTrue(isCached);<a name="line.131"></a>
+<span class="sourceLineNo">132</span>      }<a name="line.132"></a>
+<span class="sourceLineNo">133</span>      offset += block.getOnDiskSizeWithHeader();<a name="line.133"></a>
+<span class="sourceLineNo">134</span>    }<a name="line.134"></a>
+<span class="sourceLineNo">135</span>  }<a name="line.135"></a>
+<span class="sourceLineNo">136</span><a name="line.136"></a>
+<span class="sourceLineNo">137</span>  private Path writeStoreFile(String fname) throws IOException {<a name="line.137"></a>
+<span class="sourceLineNo">138</span>    Path storeFileParentDir = new Path(TEST_UTIL.getDataTestDir(), fname);<a name="line.138"></a>
+<span class="sourceLineNo">139</span>    HFileContext meta = new HFileContextBuilder()<a name="line.139"></a>
+<span class="sourceLineNo">140</span>      .withBlockSize(DATA_BLOCK_SIZE)<a name="line.140"></a>
+<span class="sourceLineNo">141</span>      .build();<a name="line.141"></a>
+<span class="sourceLineNo">142</span>    StoreFileWriter sfw = new StoreFileWriter.Builder(conf, cacheConf, fs)<a name="line.142"></a>
+<span class="sourceLineNo">143</span>      .withOutputDir(storeFileParentDir)<a name="line.143"></a>
+<span class="sourceLineNo">144</span>      .withComparator(CellComparatorImpl.COMPARATOR)<a name="line.144"></a>
+<span class="sourceLineNo">145</span>      .withFileContext(meta)<a name="line.145"></a>
+<span class="sourceLineNo">146</span>      .build();<a name="line.146"></a>
+<span class="sourceLineNo">147</span><a name="line.147"></a>
+<span class="sourceLineNo">148</span>    final int rowLen = 32;<a name="line.148"></a>
+<span class="sourceLineNo">149</span>    for (int i = 0; i &lt; NUM_KV; ++i) {<a name="line.149"></a>
+<span class="sourceLineNo">150</span>      byte[] k = RandomKeyValueUtil.randomOrderedKey(RNG, i);<a name="line.150"></a>
+<span class="sourceLineNo">151</span>      byte[] v = RandomKeyValueUtil.randomValue(RNG);<a name="line.151"></a>
+<span class="sourceLineNo">152</span>      int cfLen = RNG.nextInt(k.length - rowLen + 1);<a name="line.152"></a>
+<span class="sourceLineNo">153</span>      KeyValue kv = new KeyValue(<a name="line.153"></a>
+<span class="sourceLineNo">154</span>          k, 0, rowLen,<a name="line.154"></a>
+<span class="sourceLineNo">155</span>          k, rowLen, cfLen,<a name="line.155"></a>
+<span class="sourceLineNo">156</span>          k, rowLen + cfLen, k.length - rowLen - cfLen,<a name="line.156"></a>
+<span class="sourceLineNo">157</span>          RNG.nextLong(),<a name="line.157"></a>
+<span class="sourceLineNo">158</span>          generateKeyType(RNG),<a name="line.158"></a>
+<span class="sourceLineNo">159</span>          v, 0, v.length);<a name="line.159"></a>
+<span class="sourceLineNo">160</span>      sfw.append(kv);<a name="line.160"></a>
+<span class="sourceLineNo">161</span>    }<a name="line.161"></a>
+<span class="sourceLineNo">162</span><a name="line.162"></a>
+<span class="sourceLineNo">163</span>    sfw.close();<a name="line.163"></a>
+<span class="sourceLineNo">164</span>    return sfw.getPath();<a name="line.164"></a>
+<span class="sourceLineNo">165</span>  }<a name="line.165"></a>
+<span class="sourceLineNo">166</span><a name="line.166"></a>
+<span class="sourceLineNo">167</span>  public static KeyValue.Type generateKeyType(Random rand) {<a name="line.167"></a>
+<span class="sourceLineNo">168</span>    if (rand.nextBoolean()) {<a name="line.168"></a>
+<span class="sourceLineNo">169</span>      // Let's make half of KVs puts.<a name="line.169"></a>
+<span class="sourceLineNo">170</span>      return KeyValue.Type.Put;<a name="line.170"></a>
+<span class="sourceLineNo">171</span>    } else {<a name="line.171"></a>
+<span class="sourceLineNo">172</span>      KeyValue.Type keyType =<a name="line.172"></a>
+<span class="sourceLineNo">173</span>          KeyValue.Type.values()[1 + rand.nextInt(NUM_VALID_KEY_TYPES)];<a name="line.173"></a>
+<span class="sourceLineNo">174</span>      if (keyType == KeyValue.Type.Minimum || keyType == KeyValue.Type.Maximum)<a name="line.174"></a>
+<span class="sourceLineNo">175</span>      {<a name="line.175"></a>
+<span class="sourceLineNo">176</span>        throw new RuntimeException("Generated an invalid key type: " + keyType<a name="line.176"></a>
+<span class="sourceLineNo">177</span>            + ". " + "Probably the layout of KeyValue.Type has changed.");<a name="line.177"></a>
+<span class="sourceLineNo">178</span>      }<a name="line.178"></a>
+<span class="sourceLineNo">179</span>      return keyType;<a name="line.179"></a>
+<span class="sourceLineNo">180</span>    }<a name="line.180"></a>
+<span class="sourceLineNo">181</span>  }<a name="line.181"></a>
+<span class="sourceLineNo">182</span><a name="line.182"></a>
+<span class="sourceLineNo">183</span>}<a name="line.183"></a>
 
 
 


[25/26] hbase-site git commit: Published site at 6f15cecaed2f1f76bfe1880b7c578ed369daa5d5.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/checkstyle-aggregate.html
----------------------------------------------------------------------
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 5e3e983..51f6684 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181126" />
+    <meta name="Date-Revision-yyyymmdd" content="20181127" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Checkstyle Results</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -294,7 +294,7 @@
 <td>3815</td>
 <td>0</td>
 <td>0</td>
-<td>14837</td></tr></table></div>
+<td>14796</td></tr></table></div>
 <div class="section">
 <h2><a name="Files"></a>Files</h2>
 <table border="0" class="table table-striped">
@@ -3297,7 +3297,7 @@
 <td><a href="#org.apache.hadoop.hbase.io.hfile.CacheConfig.java">org/apache/hadoop/hbase/io/hfile/CacheConfig.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>9</td></tr>
+<td>6</td></tr>
 <tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.CacheStats.java">org/apache/hadoop/hbase/io/hfile/CacheStats.java</a></td>
 <td>0</td>
@@ -3457,7 +3457,7 @@
 <td><a href="#org.apache.hadoop.hbase.io.hfile.TestBlockCacheReporting.java">org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>5</td></tr>
+<td>2</td></tr>
 <tr class="a">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.TestCacheConfig.java">org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java</a></td>
 <td>0</td>
@@ -3502,7 +3502,7 @@
 <td><a href="#org.apache.hadoop.hbase.io.hfile.TestHFileBlockIndex.java">org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java</a></td>
 <td>0</td>
 <td>0</td>
-<td>41</td></tr>
+<td>8</td></tr>
 <tr class="b">
 <td><a href="#org.apache.hadoop.hbase.io.hfile.TestHFileBlockPositionalRead.java">org/apache/hadoop/hbase/io/hfile/TestHFileBlockPositionalRead.java</a></td>
 <td>0</td>
@@ -5974,3686 +5974,3681 @@
 <td>0</td>
 <td>3</td></tr>
 <tr class="b">
-<td><a href="#org.apache.hadoop.hbase.regionserver.AbstractMultiFileWriter.java">org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.java</a></td>
-<td>0</td>
-<td>0</td>
-<td>2</td></tr>
-<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.AbstractTestDateTieredCompactionPolicy.java">org/apache/hadoop/hbase/regionserver/AbstractTestDateTieredCompactionPolicy.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.AnnotationReadingPriorityFunction.java">org/apache/hadoop/hbase/regionserver/AnnotationReadingPriorityFunction.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>16</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.BaseRowProcessor.java">org/apache/hadoop/hbase/regionserver/BaseRowProcessor.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.BusyRegionSplitPolicy.java">org/apache/hadoop/hbase/regionserver/BusyRegionSplitPolicy.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.ByteBufferChunkKeyValue.java">org/apache/hadoop/hbase/regionserver/ByteBufferChunkKeyValue.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.CSLMImmutableSegment.java">org/apache/hadoop/hbase/regionserver/CSLMImmutableSegment.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.CellArrayImmutableSegment.java">org/apache/hadoop/hbase/regionserver/CellArrayImmutableSegment.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.CellArrayMap.java">org/apache/hadoop/hbase/regionserver/CellArrayMap.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.CellChunkMap.java">org/apache/hadoop/hbase/regionserver/CellChunkMap.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.CellFlatMap.java">org/apache/hadoop/hbase/regionserver/CellFlatMap.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>11</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.CellSet.java">org/apache/hadoop/hbase/regionserver/CellSet.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.CellSink.java">org/apache/hadoop/hbase/regionserver/CellSink.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.ChangedReadersObserver.java">org/apache/hadoop/hbase/regionserver/ChangedReadersObserver.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.Chunk.java">org/apache/hadoop/hbase/regionserver/Chunk.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.ChunkCreator.java">org/apache/hadoop/hbase/regionserver/ChunkCreator.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.CompactSplit.java">org/apache/hadoop/hbase/regionserver/CompactSplit.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.CompactedHFilesDischargeHandler.java">org/apache/hadoop/hbase/regionserver/CompactedHFilesDischargeHandler.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.CompactedHFilesDischarger.java">org/apache/hadoop/hbase/regionserver/CompactedHFilesDischarger.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.CompactingMemStore.java">org/apache/hadoop/hbase/regionserver/CompactingMemStore.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.CompactionPipeline.java">org/apache/hadoop/hbase/regionserver/CompactionPipeline.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.CompactionTool.java">org/apache/hadoop/hbase/regionserver/CompactionTool.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.CompositeImmutableSegment.java">org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy.java">org/apache/hadoop/hbase/regionserver/ConstantSizeRegionSplitPolicy.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.CreateRandomStoreFile.java">org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.DataBlockEncodingTool.java">org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.DateTieredStoreEngine.java">org/apache/hadoop/hbase/regionserver/DateTieredStoreEngine.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.DefaultHeapMemoryTuner.java">org/apache/hadoop/hbase/regionserver/DefaultHeapMemoryTuner.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>31</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.DefaultMemStore.java">org/apache/hadoop/hbase/regionserver/DefaultMemStore.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.DefaultStoreFlusher.java">org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.DelegatingKeyValueScanner.java">org/apache/hadoop/hbase/regionserver/DelegatingKeyValueScanner.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.DelimitedKeyPrefixRegionSplitPolicy.java">org/apache/hadoop/hbase/regionserver/DelimitedKeyPrefixRegionSplitPolicy.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.DisabledRegionSplitPolicy.java">org/apache/hadoop/hbase/regionserver/DisabledRegionSplitPolicy.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.FavoredNodesForRegion.java">org/apache/hadoop/hbase/regionserver/FavoredNodesForRegion.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.FifoRpcSchedulerFactory.java">org/apache/hadoop/hbase/regionserver/FifoRpcSchedulerFactory.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.FlushPolicyFactory.java">org/apache/hadoop/hbase/regionserver/FlushPolicyFactory.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.FlushRequestListener.java">org/apache/hadoop/hbase/regionserver/FlushRequestListener.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.FlushRequester.java">org/apache/hadoop/hbase/regionserver/FlushRequester.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.HMobStore.java">org/apache/hadoop/hbase/regionserver/HMobStore.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.HRegion.java">org/apache/hadoop/hbase/regionserver/HRegion.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>209</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.HRegionFileSystem.java">org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>47</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.HRegionServer.java">org/apache/hadoop/hbase/regionserver/HRegionServer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>74</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.HRegionServerCommandLine.java">org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.HStore.java">org/apache/hadoop/hbase/regionserver/HStore.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>43</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.HStoreFile.java">org/apache/hadoop/hbase/regionserver/HStoreFile.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.HeapMemoryManager.java">org/apache/hadoop/hbase/regionserver/HeapMemoryManager.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>11</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.HeapMemoryTuner.java">org/apache/hadoop/hbase/regionserver/HeapMemoryTuner.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.ImmutableSegment.java">org/apache/hadoop/hbase/regionserver/ImmutableSegment.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.IncreasingToUpperBoundRegionSplitPolicy.java">org/apache/hadoop/hbase/regionserver/IncreasingToUpperBoundRegionSplitPolicy.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.InternalScan.java">org/apache/hadoop/hbase/regionserver/InternalScan.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.InternalScanner.java">org/apache/hadoop/hbase/regionserver/InternalScanner.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.KeyPrefixRegionSplitPolicy.java">org/apache/hadoop/hbase/regionserver/KeyPrefixRegionSplitPolicy.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.KeyValueHeap.java">org/apache/hadoop/hbase/regionserver/KeyValueHeap.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>11</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.KeyValueScanner.java">org/apache/hadoop/hbase/regionserver/KeyValueScanner.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.LeaseException.java">org/apache/hadoop/hbase/regionserver/LeaseException.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.Leases.java">org/apache/hadoop/hbase/regionserver/Leases.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>10</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.LogRoller.java">org/apache/hadoop/hbase/regionserver/LogRoller.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.MemStore.java">org/apache/hadoop/hbase/regionserver/MemStore.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>10</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.MemStoreCompactor.java">org/apache/hadoop/hbase/regionserver/MemStoreCompactor.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.MemStoreFlusher.java">org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>29</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.MemStoreLAB.java">org/apache/hadoop/hbase/regionserver/MemStoreLAB.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.MemStoreLABImpl.java">org/apache/hadoop/hbase/regionserver/MemStoreLABImpl.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.MemStoreMergerSegmentsIterator.java">org/apache/hadoop/hbase/regionserver/MemStoreMergerSegmentsIterator.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.MemStoreSegmentsIterator.java">org/apache/hadoop/hbase/regionserver/MemStoreSegmentsIterator.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.MemStoreSnapshot.java">org/apache/hadoop/hbase/regionserver/MemStoreSnapshot.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.MetricsRegion.java">org/apache/hadoop/hbase/regionserver/MetricsRegion.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.MetricsRegionServer.java">org/apache/hadoop/hbase/regionserver/MetricsRegionServer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.MetricsRegionServerWrapperImpl.java">org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.MetricsRegionServerWrapperStub.java">org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperStub.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.MetricsTable.java">org/apache/hadoop/hbase/regionserver/MetricsTable.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.MetricsTableSource.java">org/apache/hadoop/hbase/regionserver/MetricsTableSource.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.MetricsTableWrapperAggregateImpl.java">org/apache/hadoop/hbase/regionserver/MetricsTableWrapperAggregateImpl.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.MetricsTableWrapperStub.java">org/apache/hadoop/hbase/regionserver/MetricsTableWrapperStub.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress.java">org/apache/hadoop/hbase/regionserver/MiniBatchOperationInProgress.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>10</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.MobReferenceOnlyFilter.java">org/apache/hadoop/hbase/regionserver/MobReferenceOnlyFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.java">org/apache/hadoop/hbase/regionserver/MultiVersionConcurrencyControl.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.MutableOnlineRegions.java">org/apache/hadoop/hbase/regionserver/MutableOnlineRegions.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.MutableSegment.java">org/apache/hadoop/hbase/regionserver/MutableSegment.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.NoTagByteBufferChunkKeyValue.java">org/apache/hadoop/hbase/regionserver/NoTagByteBufferChunkKeyValue.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.NonLazyKeyValueScanner.java">org/apache/hadoop/hbase/regionserver/NonLazyKeyValueScanner.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.NonReversedNonLazyKeyValueScanner.java">org/apache/hadoop/hbase/regionserver/NonReversedNonLazyKeyValueScanner.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.OOMERegionServer.java">org/apache/hadoop/hbase/regionserver/OOMERegionServer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.OnlineRegions.java">org/apache/hadoop/hbase/regionserver/OnlineRegions.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.OperationStatus.java">org/apache/hadoop/hbase/regionserver/OperationStatus.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.RSDumpServlet.java">org/apache/hadoop/hbase/regionserver/RSDumpServlet.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.RSRpcServices.java">org/apache/hadoop/hbase/regionserver/RSRpcServices.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>64</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.RSStatusServlet.java">org/apache/hadoop/hbase/regionserver/RSStatusServlet.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.Region.java">org/apache/hadoop/hbase/regionserver/Region.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>25</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.RegionAsTable.java">org/apache/hadoop/hbase/regionserver/RegionAsTable.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>41</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.java">org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>79</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.RegionScanner.java">org/apache/hadoop/hbase/regionserver/RegionScanner.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.RegionServerAccounting.java">org/apache/hadoop/hbase/regionserver/RegionServerAccounting.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.RegionServerCoprocessorHost.java">org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.RegionServerServices.java">org/apache/hadoop/hbase/regionserver/RegionServerServices.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.RegionServicesForStores.java">org/apache/hadoop/hbase/regionserver/RegionServicesForStores.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.RegionSplitPolicy.java">org/apache/hadoop/hbase/regionserver/RegionSplitPolicy.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.ReplicationSinkService.java">org/apache/hadoop/hbase/regionserver/ReplicationSinkService.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.ReversedKeyValueHeap.java">org/apache/hadoop/hbase/regionserver/ReversedKeyValueHeap.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>7</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.ReversedRegionScannerImpl.java">org/apache/hadoop/hbase/regionserver/ReversedRegionScannerImpl.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.ReversedStoreScanner.java">org/apache/hadoop/hbase/regionserver/ReversedStoreScanner.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.RowProcessor.java">org/apache/hadoop/hbase/regionserver/RowProcessor.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>7</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.RpcSchedulerFactory.java">org/apache/hadoop/hbase/regionserver/RpcSchedulerFactory.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.ScanInfo.java">org/apache/hadoop/hbase/regionserver/ScanInfo.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.ScanOptions.java">org/apache/hadoop/hbase/regionserver/ScanOptions.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.ScannerContext.java">org/apache/hadoop/hbase/regionserver/ScannerContext.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>9</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.ScannerIdGenerator.java">org/apache/hadoop/hbase/regionserver/ScannerIdGenerator.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>7</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.SecureBulkLoadManager.java">org/apache/hadoop/hbase/regionserver/SecureBulkLoadManager.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.Segment.java">org/apache/hadoop/hbase/regionserver/Segment.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.SegmentFactory.java">org/apache/hadoop/hbase/regionserver/SegmentFactory.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>10</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.SegmentScanner.java">org/apache/hadoop/hbase/regionserver/SegmentScanner.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.ServerNonceManager.java">org/apache/hadoop/hbase/regionserver/ServerNonceManager.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.ShipperListener.java">org/apache/hadoop/hbase/regionserver/ShipperListener.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.ShutdownHook.java">org/apache/hadoop/hbase/regionserver/ShutdownHook.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>7</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.SimpleRpcSchedulerFactory.java">org/apache/hadoop/hbase/regionserver/SimpleRpcSchedulerFactory.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.SplitRequest.java">org/apache/hadoop/hbase/regionserver/SplitRequest.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.SteppingSplitPolicy.java">org/apache/hadoop/hbase/regionserver/SteppingSplitPolicy.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.Store.java">org/apache/hadoop/hbase/regionserver/Store.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.StoreFileComparators.java">org/apache/hadoop/hbase/regionserver/StoreFileComparators.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.StoreFileInfo.java">org/apache/hadoop/hbase/regionserver/StoreFileInfo.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>18</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.StoreFileManager.java">org/apache/hadoop/hbase/regionserver/StoreFileManager.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.StoreFileReader.java">org/apache/hadoop/hbase/regionserver/StoreFileReader.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.StoreFileScanner.java">org/apache/hadoop/hbase/regionserver/StoreFileScanner.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>12</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.StoreFileWriter.java">org/apache/hadoop/hbase/regionserver/StoreFileWriter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.StoreFlushContext.java">org/apache/hadoop/hbase/regionserver/StoreFlushContext.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.StoreFlusher.java">org/apache/hadoop/hbase/regionserver/StoreFlusher.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.StoreScanner.java">org/apache/hadoop/hbase/regionserver/StoreScanner.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>21</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.StoreUtils.java">org/apache/hadoop/hbase/regionserver/StoreUtils.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.StorefileRefresherChore.java">org/apache/hadoop/hbase/regionserver/StorefileRefresherChore.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.StripeMultiFileWriter.java">org/apache/hadoop/hbase/regionserver/StripeMultiFileWriter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.StripeStoreConfig.java">org/apache/hadoop/hbase/regionserver/StripeStoreConfig.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.StripeStoreEngine.java">org/apache/hadoop/hbase/regionserver/StripeStoreEngine.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.StripeStoreFileManager.java">org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>14</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.StripeStoreFlusher.java">org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestAtomicOperation.java">org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>13</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestBlocksRead.java">org/apache/hadoop/hbase/regionserver/TestBlocksRead.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>16</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestBlocksScanned.java">org/apache/hadoop/hbase/regionserver/TestBlocksScanned.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestBulkLoad.java">org/apache/hadoop/hbase/regionserver/TestBulkLoad.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestCacheOnWriteInSchema.java">org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestCellFlatSet.java">org/apache/hadoop/hbase/regionserver/TestCellFlatSet.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestCellSkipListSet.java">org/apache/hadoop/hbase/regionserver/TestCellSkipListSet.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestClearRegionBlockCache.java">org/apache/hadoop/hbase/regionserver/TestClearRegionBlockCache.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestColumnSeeking.java">org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestCompactSplitThread.java">org/apache/hadoop/hbase/regionserver/TestCompactSplitThread.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestCompactingMemStore.java">org/apache/hadoop/hbase/regionserver/TestCompactingMemStore.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>16</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestCompactingToCellFlatMapMemStore.java">org/apache/hadoop/hbase/regionserver/TestCompactingToCellFlatMapMemStore.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>16</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestCompaction.java">org/apache/hadoop/hbase/regionserver/TestCompaction.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestCompactionState.java">org/apache/hadoop/hbase/regionserver/TestCompactionState.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>7</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestCompoundBloomFilter.java">org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>16</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestDateTieredCompactionPolicy.java">org/apache/hadoop/hbase/regionserver/TestDateTieredCompactionPolicy.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestDateTieredCompactionPolicyOverflow.java">org/apache/hadoop/hbase/regionserver/TestDateTieredCompactionPolicyOverflow.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestDefaultMemStore.java">org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>17</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestDeleteMobTable.java">org/apache/hadoop/hbase/regionserver/TestDeleteMobTable.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestEndToEndSplitTransaction.java">org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestFSErrorsExposed.java">org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestFailedAppendAndSync.java">org/apache/hadoop/hbase/regionserver/TestFailedAppendAndSync.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestGetClosestAtOrBefore.java">org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestHMobStore.java">org/apache/hadoop/hbase/regionserver/TestHMobStore.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>12</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestHRegion.java">org/apache/hadoop/hbase/regionserver/TestHRegion.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>29</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestHRegionFileSystem.java">org/apache/hadoop/hbase/regionserver/TestHRegionFileSystem.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestHRegionInfo.java">org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestHRegionOnCluster.java">org/apache/hadoop/hbase/regionserver/TestHRegionOnCluster.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestHRegionReplayEvents.java">org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>16</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestHRegionServerBulkLoad.java">org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestHRegionServerBulkLoadWithOldClient.java">org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldClient.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestHStore.java">org/apache/hadoop/hbase/regionserver/TestHStore.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>40</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestHStoreFile.java">org/apache/hadoop/hbase/regionserver/TestHStoreFile.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>22</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestJoinedScanners.java">org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestKeepDeletes.java">org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestKeyValueHeap.java">org/apache/hadoop/hbase/regionserver/TestKeyValueHeap.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestKeyValueScanFixture.java">org/apache/hadoop/hbase/regionserver/TestKeyValueScanFixture.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestMajorCompaction.java">org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestMasterAddressTracker.java">org/apache/hadoop/hbase/regionserver/TestMasterAddressTracker.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestMemStoreLAB.java">org/apache/hadoop/hbase/regionserver/TestMemStoreLAB.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestMemstoreLABWithoutPool.java">org/apache/hadoop/hbase/regionserver/TestMemstoreLABWithoutPool.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestMetricsRegion.java">org/apache/hadoop/hbase/regionserver/TestMetricsRegion.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestMetricsRegionServer.java">org/apache/hadoop/hbase/regionserver/TestMetricsRegionServer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestMinorCompaction.java">org/apache/hadoop/hbase/regionserver/TestMinorCompaction.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestMobStoreScanner.java">org/apache/hadoop/hbase/regionserver/TestMobStoreScanner.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestMultiColumnScanner.java">org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>9</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestMultiLogThreshold.java">org/apache/hadoop/hbase/regionserver/TestMultiLogThreshold.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>10</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestMultiVersionConcurrencyControl.java">org/apache/hadoop/hbase/regionserver/TestMultiVersionConcurrencyControl.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestParallelPut.java">org/apache/hadoop/hbase/regionserver/TestParallelPut.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestPerColumnFamilyFlush.java">org/apache/hadoop/hbase/regionserver/TestPerColumnFamilyFlush.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestPriorityRpc.java">org/apache/hadoop/hbase/regionserver/TestPriorityRpc.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestQosFunction.java">org/apache/hadoop/hbase/regionserver/TestQosFunction.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestRSKilledWhenInitializing.java">org/apache/hadoop/hbase/regionserver/TestRSKilledWhenInitializing.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>7</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestRecoveredEdits.java">org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>10</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestRegionIncrement.java">org/apache/hadoop/hbase/regionserver/TestRegionIncrement.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestRegionInfoBuilder.java">org/apache/hadoop/hbase/regionserver/TestRegionInfoBuilder.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestRegionMergeTransactionOnCluster.java">org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestRegionOpen.java">org/apache/hadoop/hbase/regionserver/TestRegionOpen.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestRegionReplicaFailover.java">org/apache/hadoop/hbase/regionserver/TestRegionReplicaFailover.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestRegionReplicas.java">org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>7</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestRegionServerAbort.java">org/apache/hadoop/hbase/regionserver/TestRegionServerAbort.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestRegionServerHostname.java">org/apache/hadoop/hbase/regionserver/TestRegionServerHostname.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestRegionServerMetrics.java">org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestRegionServerNoMaster.java">org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestRegionServerOnlineConfigChange.java">org/apache/hadoop/hbase/regionserver/TestRegionServerOnlineConfigChange.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestRegionServerReadRequestMetrics.java">org/apache/hadoop/hbase/regionserver/TestRegionServerReadRequestMetrics.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestRegionServerReportForDuty.java">org/apache/hadoop/hbase/regionserver/TestRegionServerReportForDuty.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestRegionSplitPolicy.java">org/apache/hadoop/hbase/regionserver/TestRegionSplitPolicy.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestResettingCounters.java">org/apache/hadoop/hbase/regionserver/TestResettingCounters.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestReversibleScanners.java">org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>15</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestRowTooBig.java">org/apache/hadoop/hbase/regionserver/TestRowTooBig.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestScanWithBloomError.java">org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestScanner.java">org/apache/hadoop/hbase/regionserver/TestScanner.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>12</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestScannerHeartbeatMessages.java">org/apache/hadoop/hbase/regionserver/TestScannerHeartbeatMessages.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestScannerRetriableFailure.java">org/apache/hadoop/hbase/regionserver/TestScannerRetriableFailure.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestScannerWithBulkload.java">org/apache/hadoop/hbase/regionserver/TestScannerWithBulkload.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestSeekOptimizations.java">org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestServerNonceManager.java">org/apache/hadoop/hbase/regionserver/TestServerNonceManager.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestSettingTimeoutOnBlockingPoint.java">org/apache/hadoop/hbase/regionserver/TestSettingTimeoutOnBlockingPoint.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestSimpleTimeRangeTracker.java">org/apache/hadoop/hbase/regionserver/TestSimpleTimeRangeTracker.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestSplitLogWorker.java">org/apache/hadoop/hbase/regionserver/TestSplitLogWorker.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>7</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestSplitTransactionOnCluster.java">org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>27</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestStoreFileInfo.java">org/apache/hadoop/hbase/regionserver/TestStoreFileInfo.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestStoreFileRefresherChore.java">org/apache/hadoop/hbase/regionserver/TestStoreFileRefresherChore.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestStoreScanner.java">org/apache/hadoop/hbase/regionserver/TestStoreScanner.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>12</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestStripeStoreFileManager.java">org/apache/hadoop/hbase/regionserver/TestStripeStoreFileManager.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestSyncTimeRangeTracker.java">org/apache/hadoop/hbase/regionserver/TestSyncTimeRangeTracker.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>7</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestTags.java">org/apache/hadoop/hbase/regionserver/TestTags.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestTimestampFilterSeekHint.java">org/apache/hadoop/hbase/regionserver/TestTimestampFilterSeekHint.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestWALLockup.java">org/apache/hadoop/hbase/regionserver/TestWALLockup.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestWALMonotonicallyIncreasingSeqId.java">org/apache/hadoop/hbase/regionserver/TestWALMonotonicallyIncreasingSeqId.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TestWalAndCompactingMemStoreFlush.java">org/apache/hadoop/hbase/regionserver/TestWalAndCompactingMemStoreFlush.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>9</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.TimeRangeTracker.java">org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.compactions.CompactionConfiguration.java">org/apache/hadoop/hbase/regionserver/compactions/CompactionConfiguration.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.compactions.CompactionProgress.java">org/apache/hadoop/hbase/regionserver/compactions/CompactionProgress.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest.java">org/apache/hadoop/hbase/regionserver/compactions/CompactionRequest.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.compactions.CompactionRequester.java">org/apache/hadoop/hbase/regionserver/compactions/CompactionRequester.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.compactions.Compactor.java">org/apache/hadoop/hbase/regionserver/compactions/Compactor.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>13</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.compactions.CurrentHourProvider.java">org/apache/hadoop/hbase/regionserver/compactions/CurrentHourProvider.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.compactions.DefaultCompactor.java">org/apache/hadoop/hbase/regionserver/compactions/DefaultCompactor.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.compactions.OffPeakHours.java">org/apache/hadoop/hbase/regionserver/compactions/OffPeakHours.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.compactions.PerfTestCompactionPolicies.java">org/apache/hadoop/hbase/regionserver/compactions/PerfTestCompactionPolicies.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>10</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.compactions.RatioBasedCompactionPolicy.java">org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.compactions.SortedCompactionPolicy.java">org/apache/hadoop/hbase/regionserver/compactions/SortedCompactionPolicy.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.compactions.SpikyFileListGenerator.java">org/apache/hadoop/hbase/regionserver/compactions/SpikyFileListGenerator.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.compactions.StripeCompactionPolicy.java">org/apache/hadoop/hbase/regionserver/compactions/StripeCompactionPolicy.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>10</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.compactions.StripeCompactor.java">org/apache/hadoop/hbase/regionserver/compactions/StripeCompactor.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.java">org/apache/hadoop/hbase/regionserver/compactions/TestCompactor.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.compactions.TestStripeCompactionPolicy.java">org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.handler.CloseRegionHandler.java">org/apache/hadoop/hbase/regionserver/handler/CloseRegionHandler.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.handler.OpenPriorityRegionHandler.java">org/apache/hadoop/hbase/regionserver/handler/OpenPriorityRegionHandler.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.handler.OpenRegionHandler.java">org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.handler.ParallelSeekHandler.java">org/apache/hadoop/hbase/regionserver/handler/ParallelSeekHandler.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.handler.RegionReplicaFlushHandler.java">org/apache/hadoop/hbase/regionserver/handler/RegionReplicaFlushHandler.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.handler.WALSplitterHandler.java">org/apache/hadoop/hbase/regionserver/handler/WALSplitterHandler.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>22</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.querymatcher.ColumnTracker.java">org/apache/hadoop/hbase/regionserver/querymatcher/ColumnTracker.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.querymatcher.CompactionScanQueryMatcher.java">org/apache/hadoop/hbase/regionserver/querymatcher/CompactionScanQueryMatcher.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker.java">org/apache/hadoop/hbase/regionserver/querymatcher/DeleteTracker.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.querymatcher.DropDeletesCompactionScanQueryMatcher.java">org/apache/hadoop/hbase/regionserver/querymatcher/DropDeletesCompactionScanQueryMatcher.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.querymatcher.ExplicitColumnTracker.java">org/apache/hadoop/hbase/regionserver/querymatcher/ExplicitColumnTracker.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.querymatcher.IncludeAllCompactionQueryMatcher.java">org/apache/hadoop/hbase/regionserver/querymatcher/IncludeAllCompactionQueryMatcher.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.querymatcher.MajorCompactionScanQueryMatcher.java">org/apache/hadoop/hbase/regionserver/querymatcher/MajorCompactionScanQueryMatcher.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.querymatcher.MinorCompactionScanQueryMatcher.java">org/apache/hadoop/hbase/regionserver/querymatcher/MinorCompactionScanQueryMatcher.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.querymatcher.NewVersionBehaviorTracker.java">org/apache/hadoop/hbase/regionserver/querymatcher/NewVersionBehaviorTracker.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>20</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.querymatcher.NormalUserScanQueryMatcher.java">org/apache/hadoop/hbase/regionserver/querymatcher/NormalUserScanQueryMatcher.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.querymatcher.RawScanQueryMatcher.java">org/apache/hadoop/hbase/regionserver/querymatcher/RawScanQueryMatcher.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.querymatcher.ScanDeleteTracker.java">org/apache/hadoop/hbase/regionserver/querymatcher/ScanDeleteTracker.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher.java">org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.querymatcher.ScanWildcardColumnTracker.java">org/apache/hadoop/hbase/regionserver/querymatcher/ScanWildcardColumnTracker.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.querymatcher.StripeCompactionScanQueryMatcher.java">org/apache/hadoop/hbase/regionserver/querymatcher/StripeCompactionScanQueryMatcher.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.querymatcher.TestExplicitColumnTracker.java">org/apache/hadoop/hbase/regionserver/querymatcher/TestExplicitColumnTracker.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.querymatcher.TestScanDeleteTracker.java">org/apache/hadoop/hbase/regionserver/querymatcher/TestScanDeleteTracker.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.querymatcher.TestUserScanQueryMatcher.java">org/apache/hadoop/hbase/regionserver/querymatcher/TestUserScanQueryMatcher.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>17</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.querymatcher.UserScanQueryMatcher.java">org/apache/hadoop/hbase/regionserver/querymatcher/UserScanQueryMatcher.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.snapshot.FlushSnapshotSubprocedure.java">org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.snapshot.RegionServerSnapshotManager.java">org/apache/hadoop/hbase/regionserver/snapshot/RegionServerSnapshotManager.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>19</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory.java">org/apache/hadoop/hbase/regionserver/throttle/CompactionThroughputControllerFactory.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.throttle.FlushThroughputControllerFactory.java">org/apache/hadoop/hbase/regionserver/throttle/FlushThroughputControllerFactory.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController.java">org/apache/hadoop/hbase/regionserver/throttle/NoLimitThroughputController.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.throttle.PressureAwareCompactionThroughputController.java">org/apache/hadoop/hbase/regionserver/throttle/PressureAwareCompactionThroughputController.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.throttle.PressureAwareFlushThroughputController.java">org/apache/hadoop/hbase/regionserver/throttle/PressureAwareFlushThroughputController.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.throttle.PressureAwareThroughputController.java">org/apache/hadoop/hbase/regionserver/throttle/PressureAwareThroughputController.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.throttle.ThroughputController.java">org/apache/hadoop/hbase/regionserver/throttle/ThroughputController.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL.java">org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.wal.AbstractProtobufLogWriter.java">org/apache/hadoop/hbase/regionserver/wal/AbstractProtobufLogWriter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.wal.AbstractTestFSWAL.java">org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.wal.AbstractTestLogRolling.java">org/apache/hadoop/hbase/regionserver/wal/AbstractTestLogRolling.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.wal.AbstractTestProtobufLog.java">org/apache/hadoop/hbase/regionserver/wal/AbstractTestProtobufLog.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.wal.AbstractTestWALReplay.java">org/apache/hadoop/hbase/regionserver/wal/AbstractTestWALReplay.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>38</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.wal.CompressionContext.java">org/apache/hadoop/hbase/regionserver/wal/CompressionContext.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.wal.Compressor.java">org/apache/hadoop/hbase/regionserver/wal/Compressor.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.wal.FSHLog.java">org/apache/hadoop/hbase/regionserver/wal/FSHLog.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>8</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.wal.FSWALEntry.java">org/apache/hadoop/hbase/regionserver/wal/FSWALEntry.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.wal.FailedLogCloseException.java">org/apache/hadoop/hbase/regionserver/wal/FailedLogCloseException.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.wal.FailedSyncBeforeLogCloseException.java">org/apache/hadoop/hbase/regionserver/wal/FailedSyncBeforeLogCloseException.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.wal.FaultyProtobufLogReader.java">org/apache/hadoop/hbase/regionserver/wal/FaultyProtobufLogReader.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.wal.InstrumentedLogWriter.java">org/apache/hadoop/hbase/regionserver/wal/InstrumentedLogWriter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.wal.MetricsWAL.java">org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader.java">org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>20</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.wal.ReaderBase.java">org/apache/hadoop/hbase/regionserver/wal/ReaderBase.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.wal.SecureProtobufLogReader.java">org/apache/hadoop/hbase/regionserver/wal/SecureProtobufLogReader.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.wal.SecureProtobufLogWriter.java">org/apache/hadoop/hbase/regionserver/wal/SecureProtobufLogWriter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.wal.SecureWALCellCodec.java">org/apache/hadoop/hbase/regionserver/wal/SecureWALCellCodec.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.wal.SequenceIdAccounting.java">org/apache/hadoop/hbase/regionserver/wal/SequenceIdAccounting.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>13</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.wal.TestLogRolling.java">org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.wal.TestLogRollingNoCluster.java">org/apache/hadoop/hbase/regionserver/wal/TestLogRollingNoCluster.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.wal.TestWALConfiguration.java">org/apache/hadoop/hbase/regionserver/wal/TestWALConfiguration.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.wal.WALActionsListener.java">org/apache/hadoop/hbase/regionserver/wal/WALActionsListener.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.wal.WALCellCodec.java">org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.regionserver.wal.WALCoprocessorHost.java">org/apache/hadoop/hbase/regionserver/wal/WALCoprocessorHost.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.regionserver.wal.WALUtil.java">org/apache/hadoop/hbase/regionserver/wal/WALUtil.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.replication.BaseReplicationEndpoint.java">org/apache/hadoop/hbase/replication/BaseReplicationEndpoint.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.replication.BulkLoadCellFilter.java">org/apache/hadoop/hbase/replication/BulkLoadCellFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.replication.ClusterMarkingEntryFilter.java">org/apache/hadoop/hbase/replication/ClusterMarkingEntryFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.replication.HBaseReplicationEndpoint.java">org/apache/hadoop/hbase/replication/HBaseReplicationEndpoint.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.replication.ReplicationEndpoint.java">org/apache/hadoop/hbase/replication/ReplicationEndpoint.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.replication.ReplicationException.java">org/apache/hadoop/hbase/replication/ReplicationException.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.replication.SystemTableWALEntryFilter.java">org/apache/hadoop/hbase/replication/SystemTableWALEntryFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.replication.TestMasterReplication.java">org/apache/hadoop/hbase/replication/TestMasterReplication.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>12</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.replication.TestMultiSlaveReplication.java">org/apache/hadoop/hbase/replication/TestMultiSlaveReplication.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>7</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.replication.TestPerTableCFReplication.java">org/apache/hadoop/hbase/replication/TestPerTableCFReplication.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.replication.TestReplicationDisableInactivePeer.java">org/apache/hadoop/hbase/replication/TestReplicationDisableInactivePeer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.replication.TestReplicationEndpoint.java">org/apache/hadoop/hbase/replication/TestReplicationEndpoint.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>7</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.replication.TestReplicationKillMasterRSCompressed.java">org/apache/hadoop/hbase/replication/TestReplicationKillMasterRSCompressed.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.replication.TestReplicationWithTags.java">org/apache/hadoop/hbase/replication/TestReplicationWithTags.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.replication.TestVerifyReplication.java">org/apache/hadoop/hbase/replication/TestVerifyReplication.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>9</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.replication.WALCellFilter.java">org/apache/hadoop/hbase/replication/WALCellFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.replication.WALEntryFilter.java">org/apache/hadoop/hbase/replication/WALEntryFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.replication.ZKReplicationQueueStorage.java">org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.replication.master.ReplicationLogCleaner.java">org/apache/hadoop/hbase/replication/master/ReplicationLogCleaner.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.replication.regionserver.DumpReplicationQueues.java">org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.replication.regionserver.HFileReplicator.java">org/apache/hadoop/hbase/replication/regionserver/HFileReplicator.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.replication.regionserver.MetricsSink.java">org/apache/hadoop/hbase/replication/regionserver/MetricsSink.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.replication.regionserver.MetricsSource.java">org/apache/hadoop/hbase/replication/regionserver/MetricsSource.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.replication.regionserver.RegionReplicaReplicationEndpoint.java">org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.replication.regionserver.Replication.java">org/apache/hadoop/hbase/replication/regionserver/Replication.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.replication.regionserver.ReplicationLoad.java">org/apache/hadoop/hbase/replication/regionserver/ReplicationLoad.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>6</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.replication.regionserver.ReplicationObserver.java">org/apache/hadoop/hbase/replication/regionserver/ReplicationObserver.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.replication.regionserver.ReplicationSink.java">org/apache/hadoop/hbase/replication/regionserver/ReplicationSink.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>11</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.replication.regionserver.ReplicationSourceFactory.java">org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceFactory.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.replication.regionserver.ReplicationSourceInterface.java">org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceInterface.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.replication.regionserver.ReplicationSourceManager.java">org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.replication.regionserver.ReplicationThrottler.java">org/apache/hadoop/hbase/replication/regionserver/ReplicationThrottler.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.replication.regionserver.TestGlobalReplicationThrottler.java">org/apache/hadoop/hbase/replication/regionserver/TestGlobalReplicationThrottler.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.replication.regionserver.TestRegionReplicaReplicationEndpointNoMaster.java">org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpointNoMaster.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.replication.regionserver.TestReplicationSink.java">org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>11</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.replication.regionserver.TestReplicationSource.java">org/apache/hadoop/hbase/replication/regionserver/TestReplicationSource.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>10</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.replication.regionserver.TestReplicationSourceManager.java">org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.replication.regionserver.TestWALEntrySinkFilter.java">org/apache/hadoop/hbase/replication/regionserver/TestWALEntrySinkFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>16</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.replication.regionserver.TestWALEntryStream.java">org/apache/hadoop/hbase/replication/regionserver/TestWALEntryStream.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.replication.regionserver.WALEntrySinkFilter.java">org/apache/hadoop/hbase/replication/regionserver/WALEntrySinkFilter.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.replication.regionserver.WALEntryStream.java">org/apache/hadoop/hbase/replication/regionserver/WALEntryStream.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.rest.ExistsResource.java">org/apache/hadoop/hbase/rest/ExistsResource.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.rest.HBaseRESTTestingUtility.java">org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>13</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.rest.MetricsREST.java">org/apache/hadoop/hbase/rest/MetricsREST.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.rest.MultiRowResource.java">org/apache/hadoop/hbase/rest/MultiRowResource.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.rest.NamespacesInstanceResource.java">org/apache/hadoop/hbase/rest/NamespacesInstanceResource.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.rest.NamespacesResource.java">org/apache/hadoop/hbase/rest/NamespacesResource.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.rest.PerformanceEvaluation.java">org/apache/hadoop/hbase/rest/PerformanceEvaluation.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>21</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.rest.ProtobufMessageHandler.java">org/apache/hadoop/hbase/rest/ProtobufMessageHandler.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.rest.RESTServer.java">org/apache/hadoop/hbase/rest/RESTServer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>16</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.rest.RESTServlet.java">org/apache/hadoop/hbase/rest/RESTServlet.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>5</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.rest.RESTServletContainer.java">org/apache/hadoop/hbase/rest/RESTServletContainer.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.rest.RegionsResource.java">org/apache/hadoop/hbase/rest/RegionsResource.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.rest.ResourceBase.java">org/apache/hadoop/hbase/rest/ResourceBase.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.rest.ResultGenerator.java">org/apache/hadoop/hbase/rest/ResultGenerator.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.rest.RootResource.java">org/apache/hadoop/hbase/rest/RootResource.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.rest.RowResource.java">org/apache/hadoop/hbase/rest/RowResource.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>52</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.rest.RowResourceBase.java">org/apache/hadoop/hbase/rest/RowResourceBase.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>15</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.rest.RowSpec.java">org/apache/hadoop/hbase/rest/RowSpec.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>32</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.rest.ScannerInstanceResource.java">org/apache/hadoop/hbase/rest/ScannerInstanceResource.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.rest.ScannerResource.java">org/apache/hadoop/hbase/rest/ScannerResource.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.rest.ScannerResultGenerator.java">org/apache/hadoop/hbase/rest/ScannerResultGenerator.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.rest.SchemaResource.java">org/apache/hadoop/hbase/rest/SchemaResource.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>9</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.rest.StorageClusterStatusResource.java">org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.rest.StorageClusterVersionResource.java">org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.rest.TableResource.java">org/apache/hadoop/hbase/rest/TableResource.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.rest.TableScanResource.java">org/apache/hadoop/hbase/rest/TableScanResource.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.rest.TestGetAndPutResource.java">org/apache/hadoop/hbase/rest/TestGetAndPutResource.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>4</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.rest.TestMultiRowResource.java">org/apache/hadoop/hbase/rest/TestMultiRowResource.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.rest.TestNamespacesInstanceResource.java">org/apache/hadoop/hbase/rest/TestNamespacesInstanceResource.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.rest.TestNamespacesResource.java">org/apache/hadoop/hbase/rest/TestNamespacesResource.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.rest.TestScannerResource.java">org/apache/hadoop/hbase/rest/TestScannerResource.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.rest.TestScannersWithFilters.java">org/apache/hadoop/hbase/rest/TestScannersWithFilters.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>144</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.rest.TestScannersWithLabels.java">org/apache/hadoop/hbase/rest/TestScannersWithLabels.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>2</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.rest.TestSchemaResource.java">org/apache/hadoop/hbase/rest/TestSchemaResource.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>1</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.rest.TestTableScan.java">org/apache/hadoop/hbase/rest/TestTableScan.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>12</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.rest.VersionResource.java">org/apache/hadoop/hbase/rest/VersionResource.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>3</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.rest.client.Client.java">org/apache/hadoop/hbase/rest/client/Client.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>28</td></tr>
-<tr class="b">
+<tr class="a">
 <td><a href="#org.apache.hadoop.hbase.rest.client.RemoteAdmin.java">org/apache/hadoop/hbase/rest/client/RemoteAdmin.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>107</td></tr>
-<tr class="a">
+<tr class="b">
 <td><a href="#org.apache.hadoop.hbase.rest.client.RemoteHTable.java">org/apache/hadoop/hbase/rest/client/RemoteHTable.java</a></td>
 <td>0</td>
 <td>0</td>
 <td>122</td></tr>
-<tr class="b

<TRUNCATED>

[06/26] hbase-site git commit: Published site at 6f15cecaed2f1f76bfe1880b7c578ed369daa5d5.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.BlockReaderWrapper.html
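The hunk below rewrites two tests in TestHFileBlockIndex so that the global block cache is created explicitly, via CacheConfig.instantiateBlockCache(conf), before a CacheConfig is constructed. The sketch that follows is not part of the commit; it condenses the pattern visible in the diff under some assumptions (a plain HBaseConfiguration instead of the test utility's conf, a caller-supplied FileSystem and Path, and a handful of hand-rolled rows) to show the order of operations: configure a small index chunk size, instantiate the block cache, write an HFile, then reopen it and probe reader.midKey().

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
    import org.apache.hadoop.hbase.io.hfile.BlockCache;
    import org.apache.hadoop.hbase.io.hfile.CacheConfig;
    import org.apache.hadoop.hbase.io.hfile.HFile;
    import org.apache.hadoop.hbase.io.hfile.HFileBlockIndex;
    import org.apache.hadoop.hbase.io.hfile.HFileContext;
    import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;

    public final class MidKeyProbeSketch {
      // Hypothetical helper, not from the commit: fs and hfilePath are supplied by the caller.
      static void writeAndProbeMidKey(FileSystem fs, Path hfilePath) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // Small leaf-index chunks so a few rows already produce more than one leaf-index block.
        conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, 512);
        conf.setBoolean(CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY, true);

        // The step this diff adds: create the global block cache before building a CacheConfig.
        CacheConfig.instantiateBlockCache(conf);
        CacheConfig cacheConf = new CacheConfig(conf);
        BlockCache blockCache = cacheConf.getBlockCache();
        blockCache.evictBlocksByHfileName(hfilePath.getName());

        // Write a small HFile, mirroring the test's writer setup.
        HFileContext meta = new HFileContextBuilder()
            .withBlockSize(1024)               // stand-in for the test's SMALL_BLOCK_SIZE
            .withCompression(Algorithm.NONE)
            .build();
        HFile.Writer writer = HFile.getWriterFactory(conf, cacheConf)
            .withPath(fs, hfilePath)
            .withFileContext(meta)
            .create();
        byte[] family = Bytes.toBytes("f");
        byte[] qualifier = Bytes.toBytes("q");
        for (int i = 0; i < 16; i++) {
          byte[] row = Bytes.toBytes(String.format("row-%04d", i));
          writer.append(new KeyValue(row, family, qualifier,
              EnvironmentEdgeManager.currentTime(), new byte[1024]));
        }
        writer.close();

        // Reopen the file and ask for the mid-key; with the block cache in place this
        // should complete without an ArrayIndexOutOfBoundsException.
        HFile.Reader reader = HFile.createReader(fs, hfilePath, cacheConf, true, conf);
        try {
          reader.midKey();
        } finally {
          reader.close();
        }
      }
    }
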
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.BlockReaderWrapper.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.BlockReaderWrapper.html
index 76a9ecc..920f7ff 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.BlockReaderWrapper.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.BlockReaderWrapper.html
@@ -532,271 +532,264 @@
 <span class="sourceLineNo">524</span>  * @throws IOException<a name="line.524"></a>
 <span class="sourceLineNo">525</span>  */<a name="line.525"></a>
 <span class="sourceLineNo">526</span>  @Test<a name="line.526"></a>
-<span class="sourceLineNo">527</span> public void testMidKeyOnLeafIndexBlockBoundary() throws IOException {<a name="line.527"></a>
-<span class="sourceLineNo">528</span>   Path hfilePath = new Path(TEST_UTIL.getDataTestDir(),<a name="line.528"></a>
-<span class="sourceLineNo">529</span>       "hfile_for_midkey");<a name="line.529"></a>
-<span class="sourceLineNo">530</span>   int maxChunkSize = 512;<a name="line.530"></a>
-<span class="sourceLineNo">531</span>   conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, maxChunkSize);<a name="line.531"></a>
-<span class="sourceLineNo">532</span>   // should open hfile.block.index.cacheonwrite<a name="line.532"></a>
-<span class="sourceLineNo">533</span>   conf.setBoolean(CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY, true);<a name="line.533"></a>
-<span class="sourceLineNo">534</span><a name="line.534"></a>
-<span class="sourceLineNo">535</span>   CacheConfig cacheConf = new CacheConfig(conf);<a name="line.535"></a>
-<span class="sourceLineNo">536</span>   BlockCache blockCache = cacheConf.getBlockCache();<a name="line.536"></a>
-<span class="sourceLineNo">537</span>   // Evict all blocks that were cached-on-write by the previous invocation.<a name="line.537"></a>
-<span class="sourceLineNo">538</span>   blockCache.evictBlocksByHfileName(hfilePath.getName());<a name="line.538"></a>
-<span class="sourceLineNo">539</span>   // Write the HFile<a name="line.539"></a>
-<span class="sourceLineNo">540</span>   {<a name="line.540"></a>
-<span class="sourceLineNo">541</span>     HFileContext meta = new HFileContextBuilder()<a name="line.541"></a>
-<span class="sourceLineNo">542</span>                         .withBlockSize(SMALL_BLOCK_SIZE)<a name="line.542"></a>
-<span class="sourceLineNo">543</span>                         .withCompression(Algorithm.NONE)<a name="line.543"></a>
-<span class="sourceLineNo">544</span>                         .withDataBlockEncoding(DataBlockEncoding.NONE)<a name="line.544"></a>
-<span class="sourceLineNo">545</span>                         .build();<a name="line.545"></a>
-<span class="sourceLineNo">546</span>     HFile.Writer writer =<a name="line.546"></a>
-<span class="sourceLineNo">547</span>           HFile.getWriterFactory(conf, cacheConf)<a name="line.547"></a>
-<span class="sourceLineNo">548</span>               .withPath(fs, hfilePath)<a name="line.548"></a>
-<span class="sourceLineNo">549</span>               .withFileContext(meta)<a name="line.549"></a>
-<span class="sourceLineNo">550</span>               .create();<a name="line.550"></a>
-<span class="sourceLineNo">551</span>     Random rand = new Random(19231737);<a name="line.551"></a>
-<span class="sourceLineNo">552</span>     byte[] family = Bytes.toBytes("f");<a name="line.552"></a>
-<span class="sourceLineNo">553</span>     byte[] qualifier = Bytes.toBytes("q");<a name="line.553"></a>
-<span class="sourceLineNo">554</span>     int kvNumberToBeWritten = 16;<a name="line.554"></a>
-<span class="sourceLineNo">555</span>     // the new generated hfile will contain 2 leaf-index blocks and 16 data blocks,<a name="line.555"></a>
-<span class="sourceLineNo">556</span>     // midkey is just on the boundary of the first leaf-index block<a name="line.556"></a>
-<span class="sourceLineNo">557</span>     for (int i = 0; i &lt; kvNumberToBeWritten; ++i) {<a name="line.557"></a>
-<span class="sourceLineNo">558</span>       byte[] row = RandomKeyValueUtil.randomOrderedFixedLengthKey(rand, i, 30);<a name="line.558"></a>
-<span class="sourceLineNo">559</span><a name="line.559"></a>
-<span class="sourceLineNo">560</span>       // Key will be interpreted by KeyValue.KEY_COMPARATOR<a name="line.560"></a>
-<span class="sourceLineNo">561</span>       KeyValue kv =<a name="line.561"></a>
-<span class="sourceLineNo">562</span>             new KeyValue(row, family, qualifier, EnvironmentEdgeManager.currentTime(),<a name="line.562"></a>
-<span class="sourceLineNo">563</span>                 RandomKeyValueUtil.randomFixedLengthValue(rand, SMALL_BLOCK_SIZE));<a name="line.563"></a>
-<span class="sourceLineNo">564</span>       writer.append(kv);<a name="line.564"></a>
-<span class="sourceLineNo">565</span>     }<a name="line.565"></a>
-<span class="sourceLineNo">566</span>     writer.close();<a name="line.566"></a>
-<span class="sourceLineNo">567</span>   }<a name="line.567"></a>
-<span class="sourceLineNo">568</span><a name="line.568"></a>
-<span class="sourceLineNo">569</span>   // close hfile.block.index.cacheonwrite<a name="line.569"></a>
-<span class="sourceLineNo">570</span>   conf.setBoolean(CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY, false);<a name="line.570"></a>
-<span class="sourceLineNo">571</span><a name="line.571"></a>
-<span class="sourceLineNo">572</span>   // Read the HFile<a name="line.572"></a>
-<span class="sourceLineNo">573</span>   HFile.Reader reader = HFile.createReader(fs, hfilePath, cacheConf, true, conf);<a name="line.573"></a>
-<span class="sourceLineNo">574</span><a name="line.574"></a>
-<span class="sourceLineNo">575</span>   boolean hasArrayIndexOutOfBoundsException = false;<a name="line.575"></a>
-<span class="sourceLineNo">576</span>   try {<a name="line.576"></a>
-<span class="sourceLineNo">577</span>     // get the mid-key.<a name="line.577"></a>
-<span class="sourceLineNo">578</span>     reader.midKey();<a name="line.578"></a>
-<span class="sourceLineNo">579</span>   } catch (ArrayIndexOutOfBoundsException e) {<a name="line.579"></a>
-<span class="sourceLineNo">580</span>     hasArrayIndexOutOfBoundsException = true;<a name="line.580"></a>
-<span class="sourceLineNo">581</span>   } finally {<a name="line.581"></a>
-<span class="sourceLineNo">582</span>     reader.close();<a name="line.582"></a>
-<span class="sourceLineNo">583</span>   }<a name="line.583"></a>
-<span class="sourceLineNo">584</span><a name="line.584"></a>
-<span class="sourceLineNo">585</span>   // to check if ArrayIndexOutOfBoundsException occurred<a name="line.585"></a>
-<span class="sourceLineNo">586</span>   assertFalse(hasArrayIndexOutOfBoundsException);<a name="line.586"></a>
-<span class="sourceLineNo">587</span> }<a name="line.587"></a>
-<span class="sourceLineNo">588</span><a name="line.588"></a>
-<span class="sourceLineNo">589</span>  /**<a name="line.589"></a>
-<span class="sourceLineNo">590</span>   * Testing block index through the HFile writer/reader APIs. Allows to test<a name="line.590"></a>
-<span class="sourceLineNo">591</span>   * setting index block size through configuration, intermediate-level index<a name="line.591"></a>
-<span class="sourceLineNo">592</span>   * blocks, and caching index blocks on write.<a name="line.592"></a>
-<span class="sourceLineNo">593</span>   *<a name="line.593"></a>
-<span class="sourceLineNo">594</span>   * @throws IOException<a name="line.594"></a>
-<span class="sourceLineNo">595</span>   */<a name="line.595"></a>
-<span class="sourceLineNo">596</span>  @Test<a name="line.596"></a>
-<span class="sourceLineNo">597</span>  public void testHFileWriterAndReader() throws IOException {<a name="line.597"></a>
-<span class="sourceLineNo">598</span>    Path hfilePath = new Path(TEST_UTIL.getDataTestDir(),<a name="line.598"></a>
-<span class="sourceLineNo">599</span>        "hfile_for_block_index");<a name="line.599"></a>
-<span class="sourceLineNo">600</span>    CacheConfig cacheConf = new CacheConfig(conf);<a name="line.600"></a>
-<span class="sourceLineNo">601</span>    BlockCache blockCache = cacheConf.getBlockCache();<a name="line.601"></a>
-<span class="sourceLineNo">602</span><a name="line.602"></a>
-<span class="sourceLineNo">603</span>    for (int testI = 0; testI &lt; INDEX_CHUNK_SIZES.length; ++testI) {<a name="line.603"></a>
-<span class="sourceLineNo">604</span>      int indexBlockSize = INDEX_CHUNK_SIZES[testI];<a name="line.604"></a>
-<span class="sourceLineNo">605</span>      int expectedNumLevels = EXPECTED_NUM_LEVELS[testI];<a name="line.605"></a>
-<span class="sourceLineNo">606</span>      LOG.info("Index block size: " + indexBlockSize + ", compression: "<a name="line.606"></a>
-<span class="sourceLineNo">607</span>          + compr);<a name="line.607"></a>
-<span class="sourceLineNo">608</span>      // Evict all blocks that were cached-on-write by the previous invocation.<a name="line.608"></a>
-<span class="sourceLineNo">609</span>      blockCache.evictBlocksByHfileName(hfilePath.getName());<a name="line.609"></a>
-<span class="sourceLineNo">610</span><a name="line.610"></a>
-<span class="sourceLineNo">611</span>      conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, indexBlockSize);<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      Set&lt;String&gt; keyStrSet = new HashSet&lt;&gt;();<a name="line.612"></a>
-<span class="sourceLineNo">613</span>      byte[][] keys = new byte[NUM_KV][];<a name="line.613"></a>
-<span class="sourceLineNo">614</span>      byte[][] values = new byte[NUM_KV][];<a name="line.614"></a>
-<span class="sourceLineNo">615</span><a name="line.615"></a>
-<span class="sourceLineNo">616</span>      // Write the HFile<a name="line.616"></a>
-<span class="sourceLineNo">617</span>      {<a name="line.617"></a>
-<span class="sourceLineNo">618</span>        HFileContext meta = new HFileContextBuilder()<a name="line.618"></a>
-<span class="sourceLineNo">619</span>                            .withBlockSize(SMALL_BLOCK_SIZE)<a name="line.619"></a>
-<span class="sourceLineNo">620</span>                            .withCompression(compr)<a name="line.620"></a>
-<span class="sourceLineNo">621</span>                            .build();<a name="line.621"></a>
-<span class="sourceLineNo">622</span>        HFile.Writer writer =<a name="line.622"></a>
-<span class="sourceLineNo">623</span>            HFile.getWriterFactory(conf, cacheConf)<a name="line.623"></a>
-<span class="sourceLineNo">624</span>                .withPath(fs, hfilePath)<a name="line.624"></a>
-<span class="sourceLineNo">625</span>                .withFileContext(meta)<a name="line.625"></a>
-<span class="sourceLineNo">626</span>                .create();<a name="line.626"></a>
-<span class="sourceLineNo">627</span>        Random rand = new Random(19231737);<a name="line.627"></a>
-<span class="sourceLineNo">628</span>        byte[] family = Bytes.toBytes("f");<a name="line.628"></a>
-<span class="sourceLineNo">629</span>        byte[] qualifier = Bytes.toBytes("q");<a name="line.629"></a>
-<span class="sourceLineNo">630</span>        for (int i = 0; i &lt; NUM_KV; ++i) {<a name="line.630"></a>
-<span class="sourceLineNo">631</span>          byte[] row = RandomKeyValueUtil.randomOrderedKey(rand, i);<a name="line.631"></a>
-<span class="sourceLineNo">632</span><a name="line.632"></a>
-<span class="sourceLineNo">633</span>          // Key will be interpreted by KeyValue.KEY_COMPARATOR<a name="line.633"></a>
-<span class="sourceLineNo">634</span>          KeyValue kv =<a name="line.634"></a>
-<span class="sourceLineNo">635</span>              new KeyValue(row, family, qualifier, EnvironmentEdgeManager.currentTime(),<a name="line.635"></a>
-<span class="sourceLineNo">636</span>                  RandomKeyValueUtil.randomValue(rand));<a name="line.636"></a>
-<span class="sourceLineNo">637</span>          byte[] k = kv.getKey();<a name="line.637"></a>
-<span class="sourceLineNo">638</span>          writer.append(kv);<a name="line.638"></a>
-<span class="sourceLineNo">639</span>          keys[i] = k;<a name="line.639"></a>
-<span class="sourceLineNo">640</span>          values[i] = CellUtil.cloneValue(kv);<a name="line.640"></a>
-<span class="sourceLineNo">641</span>          keyStrSet.add(Bytes.toStringBinary(k));<a name="line.641"></a>
-<span class="sourceLineNo">642</span>          if (i &gt; 0) {<a name="line.642"></a>
-<span class="sourceLineNo">643</span>            assertTrue((PrivateCellUtil.compare(CellComparatorImpl.COMPARATOR, kv, keys[i - 1],<a name="line.643"></a>
-<span class="sourceLineNo">644</span>                0, keys[i - 1].length)) &gt; 0);<a name="line.644"></a>
-<span class="sourceLineNo">645</span>          }<a name="line.645"></a>
-<span class="sourceLineNo">646</span>        }<a name="line.646"></a>
-<span class="sourceLineNo">647</span><a name="line.647"></a>
-<span class="sourceLineNo">648</span>        writer.close();<a name="line.648"></a>
-<span class="sourceLineNo">649</span>      }<a name="line.649"></a>
-<span class="sourceLineNo">650</span><a name="line.650"></a>
-<span class="sourceLineNo">651</span>      // Read the HFile<a name="line.651"></a>
-<span class="sourceLineNo">652</span>      HFile.Reader reader = HFile.createReader(fs, hfilePath, cacheConf, true, conf);<a name="line.652"></a>
-<span class="sourceLineNo">653</span>      assertEquals(expectedNumLevels,<a name="line.653"></a>
-<span class="sourceLineNo">654</span>          reader.getTrailer().getNumDataIndexLevels());<a name="line.654"></a>
-<span class="sourceLineNo">655</span><a name="line.655"></a>
-<span class="sourceLineNo">656</span>      assertTrue(Bytes.equals(keys[0], ((KeyValue)reader.getFirstKey().get()).getKey()));<a name="line.656"></a>
-<span class="sourceLineNo">657</span>      assertTrue(Bytes.equals(keys[NUM_KV - 1], ((KeyValue)reader.getLastKey().get()).getKey()));<a name="line.657"></a>
-<span class="sourceLineNo">658</span>      LOG.info("Last key: " + Bytes.toStringBinary(keys[NUM_KV - 1]));<a name="line.658"></a>
-<span class="sourceLineNo">659</span><a name="line.659"></a>
-<span class="sourceLineNo">660</span>      for (boolean pread : new boolean[] { false, true }) {<a name="line.660"></a>
-<span class="sourceLineNo">661</span>        HFileScanner scanner = reader.getScanner(true, pread);<a name="line.661"></a>
-<span class="sourceLineNo">662</span>        for (int i = 0; i &lt; NUM_KV; ++i) {<a name="line.662"></a>
-<span class="sourceLineNo">663</span>          checkSeekTo(keys, scanner, i);<a name="line.663"></a>
-<span class="sourceLineNo">664</span>          checkKeyValue("i=" + i, keys[i], values[i],<a name="line.664"></a>
-<span class="sourceLineNo">665</span>              ByteBuffer.wrap(((KeyValue) scanner.getKey()).getKey()), scanner.getValue());<a name="line.665"></a>
-<span class="sourceLineNo">666</span>        }<a name="line.666"></a>
-<span class="sourceLineNo">667</span>        assertTrue(scanner.seekTo());<a name="line.667"></a>
-<span class="sourceLineNo">668</span>        for (int i = NUM_KV - 1; i &gt;= 0; --i) {<a name="line.668"></a>
-<span class="sourceLineNo">669</span>          checkSeekTo(keys, scanner, i);<a name="line.669"></a>
-<span class="sourceLineNo">670</span>          checkKeyValue("i=" + i, keys[i], values[i],<a name="line.670"></a>
-<span class="sourceLineNo">671</span>              ByteBuffer.wrap(((KeyValue) scanner.getKey()).getKey()), scanner.getValue());<a name="line.671"></a>
-<span class="sourceLineNo">672</span>        }<a name="line.672"></a>
-<span class="sourceLineNo">673</span>      }<a name="line.673"></a>
-<span class="sourceLineNo">674</span><a name="line.674"></a>
-<span class="sourceLineNo">675</span>      // Manually compute the mid-key and validate it.<a name="line.675"></a>
-<span class="sourceLineNo">676</span>      HFile.Reader reader2 = reader;<a name="line.676"></a>
-<span class="sourceLineNo">677</span>      HFileBlock.FSReader fsReader = reader2.getUncachedBlockReader();<a name="line.677"></a>
-<span class="sourceLineNo">678</span><a name="line.678"></a>
-<span class="sourceLineNo">679</span>      HFileBlock.BlockIterator iter = fsReader.blockRange(0,<a name="line.679"></a>
-<span class="sourceLineNo">680</span>          reader.getTrailer().getLoadOnOpenDataOffset());<a name="line.680"></a>
-<span class="sourceLineNo">681</span>      HFileBlock block;<a name="line.681"></a>
-<span class="sourceLineNo">682</span>      List&lt;byte[]&gt; blockKeys = new ArrayList&lt;&gt;();<a name="line.682"></a>
-<span class="sourceLineNo">683</span>      while ((block = iter.nextBlock()) != null) {<a name="line.683"></a>
-<span class="sourceLineNo">684</span>        if (block.getBlockType() != BlockType.LEAF_INDEX)<a name="line.684"></a>
-<span class="sourceLineNo">685</span>          return;<a name="line.685"></a>
-<span class="sourceLineNo">686</span>        ByteBuff b = block.getBufferReadOnly();<a name="line.686"></a>
-<span class="sourceLineNo">687</span>        int n = b.getIntAfterPosition(0);<a name="line.687"></a>
-<span class="sourceLineNo">688</span>        // One int for the number of items, and n + 1 for the secondary index.<a name="line.688"></a>
-<span class="sourceLineNo">689</span>        int entriesOffset = Bytes.SIZEOF_INT * (n + 2);<a name="line.689"></a>
-<span class="sourceLineNo">690</span><a name="line.690"></a>
-<span class="sourceLineNo">691</span>        // Get all the keys from the leaf index block. S<a name="line.691"></a>
-<span class="sourceLineNo">692</span>        for (int i = 0; i &lt; n; ++i) {<a name="line.692"></a>
-<span class="sourceLineNo">693</span>          int keyRelOffset = b.getIntAfterPosition(Bytes.SIZEOF_INT * (i + 1));<a name="line.693"></a>
-<span class="sourceLineNo">694</span>          int nextKeyRelOffset = b.getIntAfterPosition(Bytes.SIZEOF_INT * (i + 2));<a name="line.694"></a>
-<span class="sourceLineNo">695</span>          int keyLen = nextKeyRelOffset - keyRelOffset;<a name="line.695"></a>
-<span class="sourceLineNo">696</span>          int keyOffset = b.arrayOffset() + entriesOffset + keyRelOffset +<a name="line.696"></a>
-<span class="sourceLineNo">697</span>              HFileBlockIndex.SECONDARY_INDEX_ENTRY_OVERHEAD;<a name="line.697"></a>
-<span class="sourceLineNo">698</span>          byte[] blockKey = Arrays.copyOfRange(b.array(), keyOffset, keyOffset<a name="line.698"></a>
-<span class="sourceLineNo">699</span>              + keyLen);<a name="line.699"></a>
-<span class="sourceLineNo">700</span>          String blockKeyStr = Bytes.toString(blockKey);<a name="line.700"></a>
-<span class="sourceLineNo">701</span>          blockKeys.add(blockKey);<a name="line.701"></a>
+<span class="sourceLineNo">527</span>  public void testMidKeyOnLeafIndexBlockBoundary() throws IOException {<a name="line.527"></a>
+<span class="sourceLineNo">528</span>    Path hfilePath = new Path(TEST_UTIL.getDataTestDir(), "hfile_for_midkey");<a name="line.528"></a>
+<span class="sourceLineNo">529</span>    int maxChunkSize = 512;<a name="line.529"></a>
+<span class="sourceLineNo">530</span>    conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, maxChunkSize);<a name="line.530"></a>
+<span class="sourceLineNo">531</span>    // should open hfile.block.index.cacheonwrite<a name="line.531"></a>
+<span class="sourceLineNo">532</span>    conf.setBoolean(CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY, true);<a name="line.532"></a>
+<span class="sourceLineNo">533</span>    CacheConfig.instantiateBlockCache(conf);<a name="line.533"></a>
+<span class="sourceLineNo">534</span>    CacheConfig cacheConf = new CacheConfig(conf);<a name="line.534"></a>
+<span class="sourceLineNo">535</span>    BlockCache blockCache = cacheConf.getBlockCache();<a name="line.535"></a>
+<span class="sourceLineNo">536</span>    // Evict all blocks that were cached-on-write by the previous invocation.<a name="line.536"></a>
+<span class="sourceLineNo">537</span>    blockCache.evictBlocksByHfileName(hfilePath.getName());<a name="line.537"></a>
+<span class="sourceLineNo">538</span>    // Write the HFile<a name="line.538"></a>
+<span class="sourceLineNo">539</span>    HFileContext meta =<a name="line.539"></a>
+<span class="sourceLineNo">540</span>        new HFileContextBuilder().withBlockSize(SMALL_BLOCK_SIZE).withCompression(Algorithm.NONE)<a name="line.540"></a>
+<span class="sourceLineNo">541</span>            .withDataBlockEncoding(DataBlockEncoding.NONE).build();<a name="line.541"></a>
+<span class="sourceLineNo">542</span>    HFile.Writer writer =<a name="line.542"></a>
+<span class="sourceLineNo">543</span>        HFile.getWriterFactory(conf, cacheConf).withPath(fs, hfilePath).withFileContext(meta)<a name="line.543"></a>
+<span class="sourceLineNo">544</span>            .create();<a name="line.544"></a>
+<span class="sourceLineNo">545</span>    Random rand = new Random(19231737);<a name="line.545"></a>
+<span class="sourceLineNo">546</span>    byte[] family = Bytes.toBytes("f");<a name="line.546"></a>
+<span class="sourceLineNo">547</span>    byte[] qualifier = Bytes.toBytes("q");<a name="line.547"></a>
+<span class="sourceLineNo">548</span>    int kvNumberToBeWritten = 16;<a name="line.548"></a>
+<span class="sourceLineNo">549</span>    // the new generated hfile will contain 2 leaf-index blocks and 16 data blocks,<a name="line.549"></a>
+<span class="sourceLineNo">550</span>    // midkey is just on the boundary of the first leaf-index block<a name="line.550"></a>
+<span class="sourceLineNo">551</span>    for (int i = 0; i &lt; kvNumberToBeWritten; ++i) {<a name="line.551"></a>
+<span class="sourceLineNo">552</span>      byte[] row = RandomKeyValueUtil.randomOrderedFixedLengthKey(rand, i, 30);<a name="line.552"></a>
+<span class="sourceLineNo">553</span><a name="line.553"></a>
+<span class="sourceLineNo">554</span>      // Key will be interpreted by KeyValue.KEY_COMPARATOR<a name="line.554"></a>
+<span class="sourceLineNo">555</span>      KeyValue kv = new KeyValue(row, family, qualifier, EnvironmentEdgeManager.currentTime(),<a name="line.555"></a>
+<span class="sourceLineNo">556</span>          RandomKeyValueUtil.randomFixedLengthValue(rand, SMALL_BLOCK_SIZE));<a name="line.556"></a>
+<span class="sourceLineNo">557</span>      writer.append(kv);<a name="line.557"></a>
+<span class="sourceLineNo">558</span>    }<a name="line.558"></a>
+<span class="sourceLineNo">559</span>    writer.close();<a name="line.559"></a>
+<span class="sourceLineNo">560</span><a name="line.560"></a>
+<span class="sourceLineNo">561</span>    // close hfile.block.index.cacheonwrite<a name="line.561"></a>
+<span class="sourceLineNo">562</span>    conf.setBoolean(CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY, false);<a name="line.562"></a>
+<span class="sourceLineNo">563</span><a name="line.563"></a>
+<span class="sourceLineNo">564</span>    // Read the HFile<a name="line.564"></a>
+<span class="sourceLineNo">565</span>    HFile.Reader reader = HFile.createReader(fs, hfilePath, cacheConf, true, conf);<a name="line.565"></a>
+<span class="sourceLineNo">566</span><a name="line.566"></a>
+<span class="sourceLineNo">567</span>    boolean hasArrayIndexOutOfBoundsException = false;<a name="line.567"></a>
+<span class="sourceLineNo">568</span>    try {<a name="line.568"></a>
+<span class="sourceLineNo">569</span>      // get the mid-key.<a name="line.569"></a>
+<span class="sourceLineNo">570</span>      reader.midKey();<a name="line.570"></a>
+<span class="sourceLineNo">571</span>    } catch (ArrayIndexOutOfBoundsException e) {<a name="line.571"></a>
+<span class="sourceLineNo">572</span>      hasArrayIndexOutOfBoundsException = true;<a name="line.572"></a>
+<span class="sourceLineNo">573</span>    } finally {<a name="line.573"></a>
+<span class="sourceLineNo">574</span>      reader.close();<a name="line.574"></a>
+<span class="sourceLineNo">575</span>    }<a name="line.575"></a>
+<span class="sourceLineNo">576</span><a name="line.576"></a>
+<span class="sourceLineNo">577</span>    // to check if ArrayIndexOutOfBoundsException occurred<a name="line.577"></a>
+<span class="sourceLineNo">578</span>    assertFalse(hasArrayIndexOutOfBoundsException);<a name="line.578"></a>
+<span class="sourceLineNo">579</span>  }<a name="line.579"></a>
+<span class="sourceLineNo">580</span><a name="line.580"></a>
+<span class="sourceLineNo">581</span>  /**<a name="line.581"></a>
+<span class="sourceLineNo">582</span>   * Testing block index through the HFile writer/reader APIs. Allows to test<a name="line.582"></a>
+<span class="sourceLineNo">583</span>   * setting index block size through configuration, intermediate-level index<a name="line.583"></a>
+<span class="sourceLineNo">584</span>   * blocks, and caching index blocks on write.<a name="line.584"></a>
+<span class="sourceLineNo">585</span>   *<a name="line.585"></a>
+<span class="sourceLineNo">586</span>   * @throws IOException<a name="line.586"></a>
+<span class="sourceLineNo">587</span>   */<a name="line.587"></a>
+<span class="sourceLineNo">588</span>  @Test<a name="line.588"></a>
+<span class="sourceLineNo">589</span>  public void testHFileWriterAndReader() throws IOException {<a name="line.589"></a>
+<span class="sourceLineNo">590</span>    Path hfilePath = new Path(TEST_UTIL.getDataTestDir(),<a name="line.590"></a>
+<span class="sourceLineNo">591</span>        "hfile_for_block_index");<a name="line.591"></a>
+<span class="sourceLineNo">592</span>    CacheConfig.instantiateBlockCache(conf);<a name="line.592"></a>
+<span class="sourceLineNo">593</span>    CacheConfig cacheConf = new CacheConfig(conf);<a name="line.593"></a>
+<span class="sourceLineNo">594</span>    BlockCache blockCache = cacheConf.getBlockCache();<a name="line.594"></a>
+<span class="sourceLineNo">595</span><a name="line.595"></a>
+<span class="sourceLineNo">596</span>    for (int testI = 0; testI &lt; INDEX_CHUNK_SIZES.length; ++testI) {<a name="line.596"></a>
+<span class="sourceLineNo">597</span>      int indexBlockSize = INDEX_CHUNK_SIZES[testI];<a name="line.597"></a>
+<span class="sourceLineNo">598</span>      int expectedNumLevels = EXPECTED_NUM_LEVELS[testI];<a name="line.598"></a>
+<span class="sourceLineNo">599</span>      LOG.info("Index block size: " + indexBlockSize + ", compression: "<a name="line.599"></a>
+<span class="sourceLineNo">600</span>          + compr);<a name="line.600"></a>
+<span class="sourceLineNo">601</span>      // Evict all blocks that were cached-on-write by the previous invocation.<a name="line.601"></a>
+<span class="sourceLineNo">602</span>      blockCache.evictBlocksByHfileName(hfilePath.getName());<a name="line.602"></a>
+<span class="sourceLineNo">603</span><a name="line.603"></a>
+<span class="sourceLineNo">604</span>      conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, indexBlockSize);<a name="line.604"></a>
+<span class="sourceLineNo">605</span>      Set&lt;String&gt; keyStrSet = new HashSet&lt;&gt;();<a name="line.605"></a>
+<span class="sourceLineNo">606</span>      byte[][] keys = new byte[NUM_KV][];<a name="line.606"></a>
+<span class="sourceLineNo">607</span>      byte[][] values = new byte[NUM_KV][];<a name="line.607"></a>
+<span class="sourceLineNo">608</span><a name="line.608"></a>
+<span class="sourceLineNo">609</span>      // Write the HFile<a name="line.609"></a>
+<span class="sourceLineNo">610</span>      {<a name="line.610"></a>
+<span class="sourceLineNo">611</span>        HFileContext meta = new HFileContextBuilder()<a name="line.611"></a>
+<span class="sourceLineNo">612</span>                            .withBlockSize(SMALL_BLOCK_SIZE)<a name="line.612"></a>
+<span class="sourceLineNo">613</span>                            .withCompression(compr)<a name="line.613"></a>
+<span class="sourceLineNo">614</span>                            .build();<a name="line.614"></a>
+<span class="sourceLineNo">615</span>        HFile.Writer writer =<a name="line.615"></a>
+<span class="sourceLineNo">616</span>            HFile.getWriterFactory(conf, cacheConf)<a name="line.616"></a>
+<span class="sourceLineNo">617</span>                .withPath(fs, hfilePath)<a name="line.617"></a>
+<span class="sourceLineNo">618</span>                .withFileContext(meta)<a name="line.618"></a>
+<span class="sourceLineNo">619</span>                .create();<a name="line.619"></a>
+<span class="sourceLineNo">620</span>        Random rand = new Random(19231737);<a name="line.620"></a>
+<span class="sourceLineNo">621</span>        byte[] family = Bytes.toBytes("f");<a name="line.621"></a>
+<span class="sourceLineNo">622</span>        byte[] qualifier = Bytes.toBytes("q");<a name="line.622"></a>
+<span class="sourceLineNo">623</span>        for (int i = 0; i &lt; NUM_KV; ++i) {<a name="line.623"></a>
+<span class="sourceLineNo">624</span>          byte[] row = RandomKeyValueUtil.randomOrderedKey(rand, i);<a name="line.624"></a>
+<span class="sourceLineNo">625</span><a name="line.625"></a>
+<span class="sourceLineNo">626</span>          // Key will be interpreted by KeyValue.KEY_COMPARATOR<a name="line.626"></a>
+<span class="sourceLineNo">627</span>          KeyValue kv =<a name="line.627"></a>
+<span class="sourceLineNo">628</span>              new KeyValue(row, family, qualifier, EnvironmentEdgeManager.currentTime(),<a name="line.628"></a>
+<span class="sourceLineNo">629</span>                  RandomKeyValueUtil.randomValue(rand));<a name="line.629"></a>
+<span class="sourceLineNo">630</span>          byte[] k = kv.getKey();<a name="line.630"></a>
+<span class="sourceLineNo">631</span>          writer.append(kv);<a name="line.631"></a>
+<span class="sourceLineNo">632</span>          keys[i] = k;<a name="line.632"></a>
+<span class="sourceLineNo">633</span>          values[i] = CellUtil.cloneValue(kv);<a name="line.633"></a>
+<span class="sourceLineNo">634</span>          keyStrSet.add(Bytes.toStringBinary(k));<a name="line.634"></a>
+<span class="sourceLineNo">635</span>          if (i &gt; 0) {<a name="line.635"></a>
+<span class="sourceLineNo">636</span>            assertTrue((PrivateCellUtil.compare(CellComparatorImpl.COMPARATOR, kv, keys[i - 1],<a name="line.636"></a>
+<span class="sourceLineNo">637</span>                0, keys[i - 1].length)) &gt; 0);<a name="line.637"></a>
+<span class="sourceLineNo">638</span>          }<a name="line.638"></a>
+<span class="sourceLineNo">639</span>        }<a name="line.639"></a>
+<span class="sourceLineNo">640</span><a name="line.640"></a>
+<span class="sourceLineNo">641</span>        writer.close();<a name="line.641"></a>
+<span class="sourceLineNo">642</span>      }<a name="line.642"></a>
+<span class="sourceLineNo">643</span><a name="line.643"></a>
+<span class="sourceLineNo">644</span>      // Read the HFile<a name="line.644"></a>
+<span class="sourceLineNo">645</span>      HFile.Reader reader = HFile.createReader(fs, hfilePath, cacheConf, true, conf);<a name="line.645"></a>
+<span class="sourceLineNo">646</span>      assertEquals(expectedNumLevels,<a name="line.646"></a>
+<span class="sourceLineNo">647</span>          reader.getTrailer().getNumDataIndexLevels());<a name="line.647"></a>
+<span class="sourceLineNo">648</span><a name="line.648"></a>
+<span class="sourceLineNo">649</span>      assertTrue(Bytes.equals(keys[0], ((KeyValue)reader.getFirstKey().get()).getKey()));<a name="line.649"></a>
+<span class="sourceLineNo">650</span>      assertTrue(Bytes.equals(keys[NUM_KV - 1], ((KeyValue)reader.getLastKey().get()).getKey()));<a name="line.650"></a>
+<span class="sourceLineNo">651</span>      LOG.info("Last key: " + Bytes.toStringBinary(keys[NUM_KV - 1]));<a name="line.651"></a>
+<span class="sourceLineNo">652</span><a name="line.652"></a>
+<span class="sourceLineNo">653</span>      for (boolean pread : new boolean[] { false, true }) {<a name="line.653"></a>
+<span class="sourceLineNo">654</span>        HFileScanner scanner = reader.getScanner(true, pread);<a name="line.654"></a>
+<span class="sourceLineNo">655</span>        for (int i = 0; i &lt; NUM_KV; ++i) {<a name="line.655"></a>
+<span class="sourceLineNo">656</span>          checkSeekTo(keys, scanner, i);<a name="line.656"></a>
+<span class="sourceLineNo">657</span>          checkKeyValue("i=" + i, keys[i], values[i],<a name="line.657"></a>
+<span class="sourceLineNo">658</span>              ByteBuffer.wrap(((KeyValue) scanner.getKey()).getKey()), scanner.getValue());<a name="line.658"></a>
+<span class="sourceLineNo">659</span>        }<a name="line.659"></a>
+<span class="sourceLineNo">660</span>        assertTrue(scanner.seekTo());<a name="line.660"></a>
+<span class="sourceLineNo">661</span>        for (int i = NUM_KV - 1; i &gt;= 0; --i) {<a name="line.661"></a>
+<span class="sourceLineNo">662</span>          checkSeekTo(keys, scanner, i);<a name="line.662"></a>
+<span class="sourceLineNo">663</span>          checkKeyValue("i=" + i, keys[i], values[i],<a name="line.663"></a>
+<span class="sourceLineNo">664</span>              ByteBuffer.wrap(((KeyValue) scanner.getKey()).getKey()), scanner.getValue());<a name="line.664"></a>
+<span class="sourceLineNo">665</span>        }<a name="line.665"></a>
+<span class="sourceLineNo">666</span>      }<a name="line.666"></a>
+<span class="sourceLineNo">667</span><a name="line.667"></a>
+<span class="sourceLineNo">668</span>      // Manually compute the mid-key and validate it.<a name="line.668"></a>
+<span class="sourceLineNo">669</span>      HFile.Reader reader2 = reader;<a name="line.669"></a>
+<span class="sourceLineNo">670</span>      HFileBlock.FSReader fsReader = reader2.getUncachedBlockReader();<a name="line.670"></a>
+<span class="sourceLineNo">671</span><a name="line.671"></a>
+<span class="sourceLineNo">672</span>      HFileBlock.BlockIterator iter = fsReader.blockRange(0,<a name="line.672"></a>
+<span class="sourceLineNo">673</span>          reader.getTrailer().getLoadOnOpenDataOffset());<a name="line.673"></a>
+<span class="sourceLineNo">674</span>      HFileBlock block;<a name="line.674"></a>
+<span class="sourceLineNo">675</span>      List&lt;byte[]&gt; blockKeys = new ArrayList&lt;&gt;();<a name="line.675"></a>
+<span class="sourceLineNo">676</span>      while ((block = iter.nextBlock()) != null) {<a name="line.676"></a>
+<span class="sourceLineNo">677</span>        if (block.getBlockType() != BlockType.LEAF_INDEX)<a name="line.677"></a>
+<span class="sourceLineNo">678</span>          return;<a name="line.678"></a>
+<span class="sourceLineNo">679</span>        ByteBuff b = block.getBufferReadOnly();<a name="line.679"></a>
+<span class="sourceLineNo">680</span>        int n = b.getIntAfterPosition(0);<a name="line.680"></a>
+<span class="sourceLineNo">681</span>        // One int for the number of items, and n + 1 for the secondary index.<a name="line.681"></a>
+<span class="sourceLineNo">682</span>        int entriesOffset = Bytes.SIZEOF_INT * (n + 2);<a name="line.682"></a>
+<span class="sourceLineNo">683</span><a name="line.683"></a>
+<span class="sourceLineNo">684</span>        // Get all the keys from the leaf index block.<a name="line.684"></a>
+<span class="sourceLineNo">685</span>        for (int i = 0; i &lt; n; ++i) {<a name="line.685"></a>
+<span class="sourceLineNo">686</span>          int keyRelOffset = b.getIntAfterPosition(Bytes.SIZEOF_INT * (i + 1));<a name="line.686"></a>
+<span class="sourceLineNo">687</span>          int nextKeyRelOffset = b.getIntAfterPosition(Bytes.SIZEOF_INT * (i + 2));<a name="line.687"></a>
+<span class="sourceLineNo">688</span>          int keyLen = nextKeyRelOffset - keyRelOffset;<a name="line.688"></a>
+<span class="sourceLineNo">689</span>          int keyOffset = b.arrayOffset() + entriesOffset + keyRelOffset +<a name="line.689"></a>
+<span class="sourceLineNo">690</span>              HFileBlockIndex.SECONDARY_INDEX_ENTRY_OVERHEAD;<a name="line.690"></a>
+<span class="sourceLineNo">691</span>          byte[] blockKey = Arrays.copyOfRange(b.array(), keyOffset, keyOffset<a name="line.691"></a>
+<span class="sourceLineNo">692</span>              + keyLen);<a name="line.692"></a>
+<span class="sourceLineNo">693</span>          String blockKeyStr = Bytes.toString(blockKey);<a name="line.693"></a>
+<span class="sourceLineNo">694</span>          blockKeys.add(blockKey);<a name="line.694"></a>
+<span class="sourceLineNo">695</span><a name="line.695"></a>
+<span class="sourceLineNo">696</span>          // If the first key of the block is not among the keys written, we<a name="line.696"></a>
+<span class="sourceLineNo">697</span>          // are not parsing the non-root index block format correctly.<a name="line.697"></a>
+<span class="sourceLineNo">698</span>          assertTrue("Invalid block key from leaf-level block: " + blockKeyStr,<a name="line.698"></a>
+<span class="sourceLineNo">699</span>              keyStrSet.contains(blockKeyStr));<a name="line.699"></a>
+<span class="sourceLineNo">700</span>        }<a name="line.700"></a>
+<span class="sourceLineNo">701</span>      }<a name="line.701"></a>
 <span class="sourceLineNo">702</span><a name="line.702"></a>
-<span class="sourceLineNo">703</span>          // If the first key of the block is not among the keys written, we<a name="line.703"></a>
-<span class="sourceLineNo">704</span>          // are not parsing the non-root index block format correctly.<a name="line.704"></a>
-<span class="sourceLineNo">705</span>          assertTrue("Invalid block key from leaf-level block: " + blockKeyStr,<a name="line.705"></a>
-<span class="sourceLineNo">706</span>              keyStrSet.contains(blockKeyStr));<a name="line.706"></a>
-<span class="sourceLineNo">707</span>        }<a name="line.707"></a>
-<span class="sourceLineNo">708</span>      }<a name="line.708"></a>
-<span class="sourceLineNo">709</span><a name="line.709"></a>
-<span class="sourceLineNo">710</span>      // Validate the mid-key.<a name="line.710"></a>
-<span class="sourceLineNo">711</span>      assertEquals(<a name="line.711"></a>
-<span class="sourceLineNo">712</span>          Bytes.toStringBinary(blockKeys.get((blockKeys.size() - 1) / 2)),<a name="line.712"></a>
-<span class="sourceLineNo">713</span>          reader.midKey());<a name="line.713"></a>
-<span class="sourceLineNo">714</span><a name="line.714"></a>
-<span class="sourceLineNo">715</span>      assertEquals(UNCOMPRESSED_INDEX_SIZES[testI],<a name="line.715"></a>
-<span class="sourceLineNo">716</span>          reader.getTrailer().getUncompressedDataIndexSize());<a name="line.716"></a>
-<span class="sourceLineNo">717</span><a name="line.717"></a>
-<span class="sourceLineNo">718</span>      reader.close();<a name="line.718"></a>
-<span class="sourceLineNo">719</span>      reader2.close();<a name="line.719"></a>
-<span class="sourceLineNo">720</span>    }<a name="line.720"></a>
-<span class="sourceLineNo">721</span>  }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>  private void checkSeekTo(byte[][] keys, HFileScanner scanner, int i)<a name="line.723"></a>
-<span class="sourceLineNo">724</span>      throws IOException {<a name="line.724"></a>
-<span class="sourceLineNo">725</span>    assertEquals("Failed to seek to key #" + i + " (" + Bytes.toStringBinary(keys[i]) + ")", 0,<a name="line.725"></a>
-<span class="sourceLineNo">726</span>        scanner.seekTo(KeyValueUtil.createKeyValueFromKey(keys[i])));<a name="line.726"></a>
+<span class="sourceLineNo">703</span>      // Validate the mid-key.<a name="line.703"></a>
+<span class="sourceLineNo">704</span>      assertEquals(<a name="line.704"></a>
+<span class="sourceLineNo">705</span>          Bytes.toStringBinary(blockKeys.get((blockKeys.size() - 1) / 2)),<a name="line.705"></a>
+<span class="sourceLineNo">706</span>          reader.midKey());<a name="line.706"></a>
+<span class="sourceLineNo">707</span><a name="line.707"></a>
+<span class="sourceLineNo">708</span>      assertEquals(UNCOMPRESSED_INDEX_SIZES[testI],<a name="line.708"></a>
+<span class="sourceLineNo">709</span>          reader.getTrailer().getUncompressedDataIndexSize());<a name="line.709"></a>
+<span class="sourceLineNo">710</span><a name="line.710"></a>
+<span class="sourceLineNo">711</span>      reader.close();<a name="line.711"></a>
+<span class="sourceLineNo">712</span>      reader2.close();<a name="line.712"></a>
+<span class="sourceLineNo">713</span>    }<a name="line.713"></a>
+<span class="sourceLineNo">714</span>  }<a name="line.714"></a>
+<span class="sourceLineNo">715</span><a name="line.715"></a>
+<span class="sourceLineNo">716</span>  private void checkSeekTo(byte[][] keys, HFileScanner scanner, int i)<a name="line.716"></a>
+<span class="sourceLineNo">717</span>      throws IOException {<a name="line.717"></a>
+<span class="sourceLineNo">718</span>    assertEquals("Failed to seek to key #" + i + " (" + Bytes.toStringBinary(keys[i]) + ")", 0,<a name="line.718"></a>
+<span class="sourceLineNo">719</span>        scanner.seekTo(KeyValueUtil.createKeyValueFromKey(keys[i])));<a name="line.719"></a>
+<span class="sourceLineNo">720</span>  }<a name="line.720"></a>
+<span class="sourceLineNo">721</span><a name="line.721"></a>
+<span class="sourceLineNo">722</span>  private void assertArrayEqualsBuffer(String msgPrefix, byte[] arr,<a name="line.722"></a>
+<span class="sourceLineNo">723</span>      ByteBuffer buf) {<a name="line.723"></a>
+<span class="sourceLineNo">724</span>    assertEquals(msgPrefix + ": expected " + Bytes.toStringBinary(arr)<a name="line.724"></a>
+<span class="sourceLineNo">725</span>        + ", actual " + Bytes.toStringBinary(buf), 0, Bytes.compareTo(arr, 0,<a name="line.725"></a>
+<span class="sourceLineNo">726</span>        arr.length, buf.array(), buf.arrayOffset(), buf.limit()));<a name="line.726"></a>
 <span class="sourceLineNo">727</span>  }<a name="line.727"></a>
 <span class="sourceLineNo">728</span><a name="line.728"></a>
-<span class="sourceLineNo">729</span>  private void assertArrayEqualsBuffer(String msgPrefix, byte[] arr,<a name="line.729"></a>
-<span class="sourceLineNo">730</span>      ByteBuffer buf) {<a name="line.730"></a>
-<span class="sourceLineNo">731</span>    assertEquals(msgPrefix + ": expected " + Bytes.toStringBinary(arr)<a name="line.731"></a>
-<span class="sourceLineNo">732</span>        + ", actual " + Bytes.toStringBinary(buf), 0, Bytes.compareTo(arr, 0,<a name="line.732"></a>
-<span class="sourceLineNo">733</span>        arr.length, buf.array(), buf.arrayOffset(), buf.limit()));<a name="line.733"></a>
-<span class="sourceLineNo">734</span>  }<a name="line.734"></a>
-<span class="sourceLineNo">735</span><a name="line.735"></a>
-<span class="sourceLineNo">736</span>  /** Check a key/value pair after it was read by the reader */<a name="line.736"></a>
-<span class="sourceLineNo">737</span>  private void checkKeyValue(String msgPrefix, byte[] expectedKey,<a name="line.737"></a>
-<span class="sourceLineNo">738</span>      byte[] expectedValue, ByteBuffer keyRead, ByteBuffer valueRead) {<a name="line.738"></a>
-<span class="sourceLineNo">739</span>    if (!msgPrefix.isEmpty())<a name="line.739"></a>
-<span class="sourceLineNo">740</span>      msgPrefix += ". ";<a name="line.740"></a>
-<span class="sourceLineNo">741</span><a name="line.741"></a>
-<span class="sourceLineNo">742</span>    assertArrayEqualsBuffer(msgPrefix + "Invalid key", expectedKey, keyRead);<a name="line.742"></a>
-<span class="sourceLineNo">743</span>    assertArrayEqualsBuffer(msgPrefix + "Invalid value", expectedValue,<a name="line.743"></a>
-<span class="sourceLineNo">744</span>        valueRead);<a name="line.744"></a>
-<span class="sourceLineNo">745</span>  }<a name="line.745"></a>
-<span class="sourceLineNo">746</span><a name="line.746"></a>
-<span class="sourceLineNo">747</span>  @Test<a name="line.747"></a>
-<span class="sourceLineNo">748</span>  public void testIntermediateLevelIndicesWithLargeKeys() throws IOException {<a name="line.748"></a>
-<span class="sourceLineNo">749</span>    testIntermediateLevelIndicesWithLargeKeys(16);<a name="line.749"></a>
-<span class="sourceLineNo">750</span>  }<a name="line.750"></a>
-<span class="sourceLineNo">751</span><a name="line.751"></a>
-<span class="sourceLineNo">752</span>  @Test<a name="line.752"></a>
-<span class="sourceLineNo">753</span>  public void testIntermediateLevelIndicesWithLargeKeysWithMinNumEntries() throws IOException {<a name="line.753"></a>
-<span class="sourceLineNo">754</span>    // because of the large rowKeys, we will end up with a 50-level block index without sanity check<a name="line.754"></a>
-<span class="sourceLineNo">755</span>    testIntermediateLevelIndicesWithLargeKeys(2);<a name="line.755"></a>
-<span class="sourceLineNo">756</span>  }<a name="line.756"></a>
-<span class="sourceLineNo">757</span><a name="line.757"></a>
-<span class="sourceLineNo">758</span>  public void testIntermediateLevelIndicesWithLargeKeys(int minNumEntries) throws IOException {<a name="line.758"></a>
-<span class="sourceLineNo">759</span>    Path hfPath = new Path(TEST_UTIL.getDataTestDir(),<a name="line.759"></a>
-<span class="sourceLineNo">760</span>      "testIntermediateLevelIndicesWithLargeKeys.hfile");<a name="line.760"></a>
-<span class="sourceLineNo">761</span>    int maxChunkSize = 1024;<a name="line.761"></a>
-<span class="sourceLineNo">762</span>    FileSystem fs = FileSystem.get(conf);<a name="line.762"></a>
-<span class="sourceLineNo">763</span>    CacheConfig cacheConf = new CacheConfig(conf);<a name="line.763"></a>
-<span class="sourceLineNo">764</span>    conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, maxChunkSize);<a name="line.764"></a>
-<span class="sourceLineNo">765</span>    conf.setInt(HFileBlockIndex.MIN_INDEX_NUM_ENTRIES_KEY, minNumEntries);<a name="line.765"></a>
-<span class="sourceLineNo">766</span>    HFileContext context = new HFileContextBuilder().withBlockSize(16).build();<a name="line.766"></a>
-<span class="sourceLineNo">767</span>    HFile.Writer hfw = new HFile.WriterFactory(conf, cacheConf)<a name="line.767"></a>
-<span class="sourceLineNo">768</span>            .withFileContext(context)<a name="line.768"></a>
-<span class="sourceLineNo">769</span>            .withPath(fs, hfPath).create();<a name="line.769"></a>
-<span class="sourceLineNo">770</span>    List&lt;byte[]&gt; keys = new ArrayList&lt;&gt;();<a name="line.770"></a>
-<span class="sourceLineNo">771</span><a name="line.771"></a>
-<span class="sourceLineNo">772</span>    // This should result in leaf-level indices and a root level index<a name="line.772"></a>
-<span class="sourceLineNo">773</span>    for (int i=0; i &lt; 100; i++) {<a name="line.773"></a>
-<span class="sourceLineNo">774</span>      byte[] rowkey = new byte[maxChunkSize + 1];<a name="line.774"></a>
-<span class="sourceLineNo">775</span>      byte[] b = Bytes.toBytes(i);<a name="line.775"></a>
-<span class="sourceLineNo">776</span>      System.arraycopy(b, 0, rowkey, rowkey.length - b.length, b.length);<a name="line.776"></a>
-<span class="sourceLineNo">777</span>      keys.add(rowkey);<a name="line.777"></a>
-<span class="sourceLineNo">778</span>      hfw.append(CellUtil.createCell(rowkey));<a name="line.778"></a>
-<span class="sourceLineNo">779</span>    }<a name="line.779"></a>
-<span class="sourceLineNo">780</span>    hfw.close();<a name="line.780"></a>
-<span class="sourceLineNo">781</span><a name="line.781"></a>
-<span class="sourceLineNo">782</span>    HFile.Reader reader = HFile.createReader(fs, hfPath, cacheConf, true, conf);<a name="line.782"></a>
-<span class="sourceLineNo">783</span>    // Scanner doesn't do Cells yet.  Fix.<a name="line.783"></a>
-<span class="sourceLineNo">784</span>    HFileScanner scanner = reader.getScanner(true, true);<a name="line.784"></a>
-<span class="sourceLineNo">785</span>    for (int i = 0; i &lt; keys.size(); ++i) {<a name="line.785"></a>
-<span class="sourceLineNo">786</span>      scanner.seekTo(CellUtil.createCell(keys.get(i)));<a name="line.786"></a>
-<span class="sourceLineNo">787</span>    }<a name="line.787"></a>
-<span class="sourceLineNo">788</span>    reader.close();<a name="line.788"></a>
-<span class="sourceLineNo">789</span>  }<a name="line.789"></a>
-<span class="sourceLineNo">790</span>}<a name="line.790"></a>
-<span class="sourceLineNo">791</span><a name="line.791"></a>
+<span class="sourceLineNo">729</span>  /** Check a key/value pair after it was read by the reader */<a name="line.729"></a>
+<span class="sourceLineNo">730</span>  private void checkKeyValue(String msgPrefix, byte[] expectedKey,<a name="line.730"></a>
+<span class="sourceLineNo">731</span>      byte[] expectedValue, ByteBuffer keyRead, ByteBuffer valueRead) {<a name="line.731"></a>
+<span class="sourceLineNo">732</span>    if (!msgPrefix.isEmpty())<a name="line.732"></a>
+<span class="sourceLineNo">733</span>      msgPrefix += ". ";<a name="line.733"></a>
+<span class="sourceLineNo">734</span><a name="line.734"></a>
+<span class="sourceLineNo">735</span>    assertArrayEqualsBuffer(msgPrefix + "Invalid key", expectedKey, keyRead);<a name="line.735"></a>
+<span class="sourceLineNo">736</span>    assertArrayEqualsBuffer(msgPrefix + "Invalid value", expectedValue,<a name="line.736"></a>
+<span class="sourceLineNo">737</span>        valueRead);<a name="line.737"></a>
+<span class="sourceLineNo">738</span>  }<a name="line.738"></a>
+<span class="sourceLineNo">739</span><a name="line.739"></a>
+<span class="sourceLineNo">740</span>  @Test<a name="line.740"></a>
+<span class="sourceLineNo">741</span>  public void testIntermediateLevelIndicesWithLargeKeys() throws IOException {<a name="line.741"></a>
+<span class="sourceLineNo">742</span>    testIntermediateLevelIndicesWithLargeKeys(16);<a name="line.742"></a>
+<span class="sourceLineNo">743</span>  }<a name="line.743"></a>
+<span class="sourceLineNo">744</span><a name="line.744"></a>
+<span class="sourceLineNo">745</span>  @Test<a name="line.745"></a>
+<span class="sourceLineNo">746</span>  public void testIntermediateLevelIndicesWithLargeKeysWithMinNumEntries() throws IOException {<a name="line.746"></a>
+<span class="sourceLineNo">747</span>    // because of the large rowKeys, we will end up with a 50-level block index without sanity check<a name="line.747"></a>
+<span class="sourceLineNo">748</span>    testIntermediateLevelIndicesWithLargeKeys(2);<a name="line.748"></a>
+<span class="sourceLineNo">749</span>  }<a name="line.749"></a>
+<span class="sourceLineNo">750</span><a name="line.750"></a>
+<span class="sourceLineNo">751</span>  public void testIntermediateLevelIndicesWithLargeKeys(int minNumEntries) throws IOException {<a name="line.751"></a>
+<span class="sourceLineNo">752</span>    Path hfPath = new Path(TEST_UTIL.getDataTestDir(),<a name="line.752"></a>
+<span class="sourceLineNo">753</span>      "testIntermediateLevelIndicesWithLargeKeys.hfile");<a name="line.753"></a>
+<span class="sourceLineNo">754</span>    int maxChunkSize = 1024;<a name="line.754"></a>
+<span class="sourceLineNo">755</span>    FileSystem fs = FileSystem.get(conf);<a name="line.755"></a>
+<span class="sourceLineNo">756</span>    CacheConfig cacheConf = new CacheConfig(conf);<a name="line.756"></a>
+<span class="sourceLineNo">757</span>    conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, maxChunkSize);<a name="line.757"></a>
+<span class="sourceLineNo">758</span>    conf.setInt(HFileBlockIndex.MIN_INDEX_NUM_ENTRIES_KEY, minNumEntries);<a name="line.758"></a>
+<span class="sourceLineNo">759</span>    HFileContext context = new HFileContextBuilder().withBlockSize(16).build();<a name="line.759"></a>
+<span class="sourceLineNo">760</span>    HFile.Writer hfw = new HFile.WriterFactory(conf, cacheConf)<a name="line.760"></a>
+<span class="sourceLineNo">761</span>            .withFileContext(context)<a name="line.761"></a>
+<span class="sourceLineNo">762</span>            .withPath(fs, hfPath).create();<a name="line.762"></a>
+<span class="sourceLineNo">763</span>    List&lt;byte[]&gt; keys = new ArrayList&lt;&gt;();<a name="line.763"></a>
+<span class="sourceLineNo">764</span><a name="line.764"></a>
+<span class="sourceLineNo">765</span>    // This should result in leaf-level indices and a root level index<a name="line.765"></a>
+<span class="sourceLineNo">766</span>    for (int i=0; i &lt; 100; i++) {<a name="line.766"></a>
+<span class="sourceLineNo">767</span>      byte[] rowkey = new byte[maxChunkSize + 1];<a name="line.767"></a>
+<span class="sourceLineNo">768</span>      byte[] b = Bytes.toBytes(i);<a name="line.768"></a>
+<span class="sourceLineNo">769</span>      System.arraycopy(b, 0, rowkey, rowkey.length - b.length, b.length);<a name="line.769"></a>
+<span class="sourceLineNo">770</span>      keys.add(rowkey);<a name="line.770"></a>
+<span class="sourceLineNo">771</span>      hfw.append(CellUtil.createCell(rowkey));<a name="line.771"></a>
+<span class="sourceLineNo">772</span>    }<a name="line.772"></a>
+<span class="sourceLineNo">773</span>    hfw.close();<a name="line.773"></a>
+<span class="sourceLineNo">774</span><a name="line.774"></a>
+<span class="sourceLineNo">775</span>    HFile.Reader reader = HFile.createReader(fs, hfPath, cacheConf, true, conf);<a name="line.775"></a>
+<span class="sourceLineNo">776</span>    // Scanner doesn't do Cells yet.  Fix.<a name="line.776"></a>
+<span class="sourceLineNo">777</span>    HFileScanner scanner = reader.getScanner(true, true);<a name="line.777"></a>
+<span class="sourceLineNo">778</span>    for (int i = 0; i &lt; keys.size(); ++i) {<a name="line.778"></a>
+<span class="sourceLineNo">779</span>      scanner.seekTo(CellUtil.createCell(keys.get(i)));<a name="line.779"></a>
+<span class="sourceLineNo">780</span>    }<a name="line.780"></a>
+<span class="sourceLineNo">781</span>    reader.close();<a name="line.781"></a>
+<span class="sourceLineNo">782</span>  }<a name="line.782"></a>
+<span class="sourceLineNo">783</span>}<a name="line.783"></a>
+<span class="sourceLineNo">784</span><a name="line.784"></a>
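
The change this hunk makes at the start of testHFileWriterAndReader -- and that the HRegionServer hunk later in this message makes in the server constructor -- is that the global block cache is now created by an explicit CacheConfig.instantiateBlockCache(conf) call issued before a CacheConfig is built. Below is a minimal standalone sketch of that pattern, not code from this commit; apart from the three CacheConfig/BlockCache lines visible in the diffs, the class name, main method, and configuration setup are scaffolding assumed for illustration.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.io.hfile.BlockCache;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;

public class BlockCacheSetupSketch {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // This commit adds this call ahead of `new CacheConfig(conf)` wherever a
    // block cache is actually wanted (the tests, and HRegionServer when the
    // process carries tables).
    CacheConfig.instantiateBlockCache(conf);
    CacheConfig cacheConf = new CacheConfig(conf);
    BlockCache blockCache = cacheConf.getBlockCache();
    System.out.println("block cache present: " + (blockCache != null));
  }
}

In the HRegionServer hunk the same call is deliberately skipped when the process is a master that does not carry tables, so such a master no longer sets up a global block cache at all.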
 
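
The loop near the end of the test hunk above walks a leaf ("non-root") index block by hand: one int holding the entry count, a secondary index of n + 1 ints giving each entry's offset relative to the end of that secondary index, and then the entries themselves, with whatever HFileBlockIndex.SECONDARY_INDEX_ENTRY_OVERHEAD accounts for (taken here to be a long block offset plus an int on-disk size) stored ahead of each key. The sketch below is a toy round-trip of that layout over a plain java.nio.ByteBuffer; the 12-byte overhead constant and every class, method, and variable name in it are assumptions of the sketch, not quotes from HBase.

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.List;

public class NonRootIndexLayoutSketch {
  static final int SIZEOF_INT = 4;
  static final int ENTRY_OVERHEAD = 8 + 4; // long blockOffset + int onDiskSize ahead of each key

  /** Serialize keys into the toy layout: entry count, secondary index, entries. */
  static ByteBuffer write(List<byte[]> keys) {
    int n = keys.size();
    int entriesSize = 0;
    for (byte[] k : keys) {
      entriesSize += ENTRY_OVERHEAD + k.length;
    }
    ByteBuffer b = ByteBuffer.allocate(SIZEOF_INT * (n + 2) + entriesSize);
    b.putInt(n);
    int rel = 0;
    for (byte[] k : keys) {
      b.putInt(rel);                  // entry offset relative to the entries area
      rel += ENTRY_OVERHEAD + k.length;
    }
    b.putInt(rel);                    // extra (n + 1)-th offset marking the end
    for (int i = 0; i < n; i++) {
      b.putLong(1000L * i);           // fake block offset
      b.putInt(64);                   // fake on-disk size
      b.put(keys.get(i));
    }
    b.flip();
    return b;
  }

  /** Look key i up through the secondary index, as the loop in the hunk above does. */
  static byte[] readKey(ByteBuffer b, int i) {
    int n = b.getInt(0);
    int entriesOffset = SIZEOF_INT * (n + 2);   // count int + (n + 1) secondary-index ints
    int keyRelOffset = b.getInt(SIZEOF_INT * (i + 1));
    int nextKeyRelOffset = b.getInt(SIZEOF_INT * (i + 2));
    int keyLen = nextKeyRelOffset - keyRelOffset - ENTRY_OVERHEAD; // key bytes only
    int keyOffset = entriesOffset + keyRelOffset + ENTRY_OVERHEAD; // skip offset + size
    // Heap buffer from allocate(), so array() is available and arrayOffset() is 0.
    return Arrays.copyOfRange(b.array(), keyOffset, keyOffset + keyLen);
  }

  public static void main(String[] args) {
    List<byte[]> keys = Arrays.asList(
        "row-000".getBytes(StandardCharsets.UTF_8),
        "row-001".getBytes(StandardCharsets.UTF_8));
    ByteBuffer block = write(keys);
    for (int i = 0; i < keys.size(); i++) {
      System.out.println(new String(readKey(block, i), StandardCharsets.UTF_8));
    }
  }
}

Printing the keys back in order is the toy analogue of the assertion in the hunk above that every key pulled out of a leaf-level block must be one of the keys that were written.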
 
 


[15/26] hbase-site git commit: Published site at 6f15cecaed2f1f76bfe1880b7c578ed369daa5d5.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
index 6369c27..ea05301 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
@@ -603,3251 +603,3256 @@
 <span class="sourceLineNo">595</span>      // init superusers and add the server principal (if using security)<a name="line.595"></a>
 <span class="sourceLineNo">596</span>      // or process owner as default super user.<a name="line.596"></a>
 <span class="sourceLineNo">597</span>      Superusers.initialize(conf);<a name="line.597"></a>
-<span class="sourceLineNo">598</span><a name="line.598"></a>
-<span class="sourceLineNo">599</span>      regionServerAccounting = new RegionServerAccounting(conf);<a name="line.599"></a>
+<span class="sourceLineNo">598</span>      regionServerAccounting = new RegionServerAccounting(conf);<a name="line.598"></a>
+<span class="sourceLineNo">599</span><a name="line.599"></a>
 <span class="sourceLineNo">600</span>      boolean isMasterNotCarryTable =<a name="line.600"></a>
 <span class="sourceLineNo">601</span>          this instanceof HMaster &amp;&amp; !LoadBalancer.isTablesOnMaster(conf);<a name="line.601"></a>
-<span class="sourceLineNo">602</span>      cacheConfig = new CacheConfig(conf, !isMasterNotCarryTable);<a name="line.602"></a>
-<span class="sourceLineNo">603</span>      mobCacheConfig = new MobCacheConfig(conf, !isMasterNotCarryTable);<a name="line.603"></a>
-<span class="sourceLineNo">604</span>      uncaughtExceptionHandler = new UncaughtExceptionHandler() {<a name="line.604"></a>
-<span class="sourceLineNo">605</span>        @Override<a name="line.605"></a>
-<span class="sourceLineNo">606</span>        public void uncaughtException(Thread t, Throwable e) {<a name="line.606"></a>
-<span class="sourceLineNo">607</span>          abort("Uncaught exception in executorService thread " + t.getName(), e);<a name="line.607"></a>
-<span class="sourceLineNo">608</span>        }<a name="line.608"></a>
-<span class="sourceLineNo">609</span>      };<a name="line.609"></a>
-<span class="sourceLineNo">610</span><a name="line.610"></a>
-<span class="sourceLineNo">611</span>      initializeFileSystem();<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      spanReceiverHost = SpanReceiverHost.getInstance(getConfiguration());<a name="line.612"></a>
-<span class="sourceLineNo">613</span><a name="line.613"></a>
-<span class="sourceLineNo">614</span>      this.configurationManager = new ConfigurationManager();<a name="line.614"></a>
-<span class="sourceLineNo">615</span>      setupWindows(getConfiguration(), getConfigurationManager());<a name="line.615"></a>
-<span class="sourceLineNo">616</span><a name="line.616"></a>
-<span class="sourceLineNo">617</span>      // Some unit tests don't need a cluster, so no zookeeper at all<a name="line.617"></a>
-<span class="sourceLineNo">618</span>      if (!conf.getBoolean("hbase.testing.nocluster", false)) {<a name="line.618"></a>
-<span class="sourceLineNo">619</span>        // Open connection to zookeeper and set primary watcher<a name="line.619"></a>
-<span class="sourceLineNo">620</span>        zooKeeper = new ZKWatcher(conf, getProcessName() + ":" +<a name="line.620"></a>
-<span class="sourceLineNo">621</span>          rpcServices.isa.getPort(), this, canCreateBaseZNode());<a name="line.621"></a>
-<span class="sourceLineNo">622</span>        // If no master in cluster, skip trying to track one or look for a cluster status.<a name="line.622"></a>
-<span class="sourceLineNo">623</span>        if (!this.masterless) {<a name="line.623"></a>
-<span class="sourceLineNo">624</span>          this.csm = new ZkCoordinatedStateManager(this);<a name="line.624"></a>
-<span class="sourceLineNo">625</span><a name="line.625"></a>
-<span class="sourceLineNo">626</span>          masterAddressTracker = new MasterAddressTracker(getZooKeeper(), this);<a name="line.626"></a>
-<span class="sourceLineNo">627</span>          masterAddressTracker.start();<a name="line.627"></a>
-<span class="sourceLineNo">628</span><a name="line.628"></a>
-<span class="sourceLineNo">629</span>          clusterStatusTracker = new ClusterStatusTracker(zooKeeper, this);<a name="line.629"></a>
-<span class="sourceLineNo">630</span>          clusterStatusTracker.start();<a name="line.630"></a>
-<span class="sourceLineNo">631</span>        } else {<a name="line.631"></a>
-<span class="sourceLineNo">632</span>          masterAddressTracker = null;<a name="line.632"></a>
-<span class="sourceLineNo">633</span>          clusterStatusTracker = null;<a name="line.633"></a>
-<span class="sourceLineNo">634</span>        }<a name="line.634"></a>
-<span class="sourceLineNo">635</span>      } else {<a name="line.635"></a>
-<span class="sourceLineNo">636</span>        zooKeeper = null;<a name="line.636"></a>
-<span class="sourceLineNo">637</span>        masterAddressTracker = null;<a name="line.637"></a>
-<span class="sourceLineNo">638</span>        clusterStatusTracker = null;<a name="line.638"></a>
-<span class="sourceLineNo">639</span>      }<a name="line.639"></a>
-<span class="sourceLineNo">640</span>      this.rpcServices.start(zooKeeper);<a name="line.640"></a>
-<span class="sourceLineNo">641</span>      // This violates 'no starting stuff in Constructor' but Master depends on the below chore<a name="line.641"></a>
-<span class="sourceLineNo">642</span>      // and executor being created and takes a different startup route. Lots of overlap between HRS<a name="line.642"></a>
-<span class="sourceLineNo">643</span>      // and M (An M IS A HRS now). Need to refactor so less duplication between M and its super<a name="line.643"></a>
-<span class="sourceLineNo">644</span>      // Master expects Constructor to put up web servers. Ugh.<a name="line.644"></a>
-<span class="sourceLineNo">645</span>      // class HRS. TODO.<a name="line.645"></a>
-<span class="sourceLineNo">646</span>      this.choreService = new ChoreService(getName(), true);<a name="line.646"></a>
-<span class="sourceLineNo">647</span>      this.executorService = new ExecutorService(getName());<a name="line.647"></a>
-<span class="sourceLineNo">648</span>      putUpWebUI();<a name="line.648"></a>
-<span class="sourceLineNo">649</span>    } catch (Throwable t) {<a name="line.649"></a>
-<span class="sourceLineNo">650</span>      // Make sure we log the exception. HRegionServer is often started via reflection and the<a name="line.650"></a>
-<span class="sourceLineNo">651</span>      // cause of failed startup is lost.<a name="line.651"></a>
-<span class="sourceLineNo">652</span>      LOG.error("Failed construction RegionServer", t);<a name="line.652"></a>
-<span class="sourceLineNo">653</span>      throw t;<a name="line.653"></a>
-<span class="sourceLineNo">654</span>    }<a name="line.654"></a>
-<span class="sourceLineNo">655</span>  }<a name="line.655"></a>
-<span class="sourceLineNo">656</span><a name="line.656"></a>
-<span class="sourceLineNo">657</span>  // HMaster should override this method to load the specific config for master<a name="line.657"></a>
-<span class="sourceLineNo">658</span>  protected String getUseThisHostnameInstead(Configuration conf) throws IOException {<a name="line.658"></a>
-<span class="sourceLineNo">659</span>    String hostname = conf.get(RS_HOSTNAME_KEY);<a name="line.659"></a>
-<span class="sourceLineNo">660</span>    if (conf.getBoolean(RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY, false)) {<a name="line.660"></a>
-<span class="sourceLineNo">661</span>      if (!StringUtils.isBlank(hostname)) {<a name="line.661"></a>
-<span class="sourceLineNo">662</span>        String msg = RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY + " and " + RS_HOSTNAME_KEY +<a name="line.662"></a>
-<span class="sourceLineNo">663</span>          " are mutually exclusive. Do not set " + RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY +<a name="line.663"></a>
-<span class="sourceLineNo">664</span>          " to true while " + RS_HOSTNAME_KEY + " is used";<a name="line.664"></a>
-<span class="sourceLineNo">665</span>        throw new IOException(msg);<a name="line.665"></a>
-<span class="sourceLineNo">666</span>      } else {<a name="line.666"></a>
-<span class="sourceLineNo">667</span>        return rpcServices.isa.getHostName();<a name="line.667"></a>
-<span class="sourceLineNo">668</span>      }<a name="line.668"></a>
-<span class="sourceLineNo">669</span>    } else {<a name="line.669"></a>
-<span class="sourceLineNo">670</span>      return hostname;<a name="line.670"></a>
-<span class="sourceLineNo">671</span>    }<a name="line.671"></a>
-<span class="sourceLineNo">672</span>  }<a name="line.672"></a>
-<span class="sourceLineNo">673</span><a name="line.673"></a>
-<span class="sourceLineNo">674</span>  /**<a name="line.674"></a>
-<span class="sourceLineNo">675</span>   * If running on Windows, do windows-specific setup.<a name="line.675"></a>
-<span class="sourceLineNo">676</span>   */<a name="line.676"></a>
-<span class="sourceLineNo">677</span>  private static void setupWindows(final Configuration conf, ConfigurationManager cm) {<a name="line.677"></a>
-<span class="sourceLineNo">678</span>    if (!SystemUtils.IS_OS_WINDOWS) {<a name="line.678"></a>
-<span class="sourceLineNo">679</span>      Signal.handle(new Signal("HUP"), new SignalHandler() {<a name="line.679"></a>
-<span class="sourceLineNo">680</span>        @Override<a name="line.680"></a>
-<span class="sourceLineNo">681</span>        public void handle(Signal signal) {<a name="line.681"></a>
-<span class="sourceLineNo">682</span>          conf.reloadConfiguration();<a name="line.682"></a>
-<span class="sourceLineNo">683</span>          cm.notifyAllObservers(conf);<a name="line.683"></a>
-<span class="sourceLineNo">684</span>        }<a name="line.684"></a>
-<span class="sourceLineNo">685</span>      });<a name="line.685"></a>
-<span class="sourceLineNo">686</span>    }<a name="line.686"></a>
-<span class="sourceLineNo">687</span>  }<a name="line.687"></a>
-<span class="sourceLineNo">688</span><a name="line.688"></a>
-<span class="sourceLineNo">689</span>  private static NettyEventLoopGroupConfig setupNetty(Configuration conf) {<a name="line.689"></a>
-<span class="sourceLineNo">690</span>    // Initialize netty event loop group at start as we may use it for rpc server, rpc client &amp; WAL.<a name="line.690"></a>
-<span class="sourceLineNo">691</span>    NettyEventLoopGroupConfig nelgc =<a name="line.691"></a>
-<span class="sourceLineNo">692</span>      new NettyEventLoopGroupConfig(conf, "RS-EventLoopGroup");<a name="line.692"></a>
-<span class="sourceLineNo">693</span>    NettyRpcClientConfigHelper.setEventLoopConfig(conf, nelgc.group(), nelgc.clientChannelClass());<a name="line.693"></a>
-<span class="sourceLineNo">694</span>    NettyAsyncFSWALConfigHelper.setEventLoopConfig(conf, nelgc.group(), nelgc.clientChannelClass());<a name="line.694"></a>
-<span class="sourceLineNo">695</span>    return nelgc;<a name="line.695"></a>
-<span class="sourceLineNo">696</span>  }<a name="line.696"></a>
-<span class="sourceLineNo">697</span><a name="line.697"></a>
-<span class="sourceLineNo">698</span>  private void initializeFileSystem() throws IOException {<a name="line.698"></a>
-<span class="sourceLineNo">699</span>    // Get fs instance used by this RS.  Do we use checksum verification in the hbase? If hbase<a name="line.699"></a>
-<span class="sourceLineNo">700</span>    // checksum verification enabled, then automatically switch off hdfs checksum verification.<a name="line.700"></a>
-<span class="sourceLineNo">701</span>    boolean useHBaseChecksum = conf.getBoolean(HConstants.HBASE_CHECKSUM_VERIFICATION, true);<a name="line.701"></a>
-<span class="sourceLineNo">702</span>    FSUtils.setFsDefault(this.conf, FSUtils.getWALRootDir(this.conf));<a name="line.702"></a>
-<span class="sourceLineNo">703</span>    this.walFs = new HFileSystem(this.conf, useHBaseChecksum);<a name="line.703"></a>
-<span class="sourceLineNo">704</span>    this.walRootDir = FSUtils.getWALRootDir(this.conf);<a name="line.704"></a>
-<span class="sourceLineNo">705</span>    // Set 'fs.defaultFS' to match the filesystem on hbase.rootdir else<a name="line.705"></a>
-<span class="sourceLineNo">706</span>    // underlying hadoop hdfs accessors will be going against wrong filesystem<a name="line.706"></a>
-<span class="sourceLineNo">707</span>    // (unless all is set to defaults).<a name="line.707"></a>
-<span class="sourceLineNo">708</span>    FSUtils.setFsDefault(this.conf, FSUtils.getRootDir(this.conf));<a name="line.708"></a>
-<span class="sourceLineNo">709</span>    this.fs = new HFileSystem(this.conf, useHBaseChecksum);<a name="line.709"></a>
-<span class="sourceLineNo">710</span>    this.rootDir = FSUtils.getRootDir(this.conf);<a name="line.710"></a>
-<span class="sourceLineNo">711</span>    this.tableDescriptors = getFsTableDescriptors();<a name="line.711"></a>
-<span class="sourceLineNo">712</span>  }<a name="line.712"></a>
-<span class="sourceLineNo">713</span><a name="line.713"></a>
-<span class="sourceLineNo">714</span>  protected TableDescriptors getFsTableDescriptors() throws IOException {<a name="line.714"></a>
-<span class="sourceLineNo">715</span>    return new FSTableDescriptors(this.conf,<a name="line.715"></a>
-<span class="sourceLineNo">716</span>      this.fs, this.rootDir, !canUpdateTableDescriptor(), false, getMetaTableObserver());<a name="line.716"></a>
+<span class="sourceLineNo">602</span>      // no need to instantiate global block cache when master not carry table<a name="line.602"></a>
+<span class="sourceLineNo">603</span>      if (!isMasterNotCarryTable) {<a name="line.603"></a>
+<span class="sourceLineNo">604</span>        CacheConfig.instantiateBlockCache(conf);<a name="line.604"></a>
+<span class="sourceLineNo">605</span>      }<a name="line.605"></a>
+<span class="sourceLineNo">606</span>      cacheConfig = new CacheConfig(conf);<a name="line.606"></a>
+<span class="sourceLineNo">607</span>      mobCacheConfig = new MobCacheConfig(conf);<a name="line.607"></a>
+<span class="sourceLineNo">608</span><a name="line.608"></a>
+<span class="sourceLineNo">609</span>      uncaughtExceptionHandler = new UncaughtExceptionHandler() {<a name="line.609"></a>
+<span class="sourceLineNo">610</span>        @Override<a name="line.610"></a>
+<span class="sourceLineNo">611</span>        public void uncaughtException(Thread t, Throwable e) {<a name="line.611"></a>
+<span class="sourceLineNo">612</span>          abort("Uncaught exception in executorService thread " + t.getName(), e);<a name="line.612"></a>
+<span class="sourceLineNo">613</span>        }<a name="line.613"></a>
+<span class="sourceLineNo">614</span>      };<a name="line.614"></a>
+<span class="sourceLineNo">615</span><a name="line.615"></a>
+<span class="sourceLineNo">616</span>      initializeFileSystem();<a name="line.616"></a>
+<span class="sourceLineNo">617</span>      spanReceiverHost = SpanReceiverHost.getInstance(getConfiguration());<a name="line.617"></a>
+<span class="sourceLineNo">618</span><a name="line.618"></a>
+<span class="sourceLineNo">619</span>      this.configurationManager = new ConfigurationManager();<a name="line.619"></a>
+<span class="sourceLineNo">620</span>      setupWindows(getConfiguration(), getConfigurationManager());<a name="line.620"></a>
+<span class="sourceLineNo">621</span><a name="line.621"></a>
+<span class="sourceLineNo">622</span>      // Some unit tests don't need a cluster, so no zookeeper at all<a name="line.622"></a>
+<span class="sourceLineNo">623</span>      if (!conf.getBoolean("hbase.testing.nocluster", false)) {<a name="line.623"></a>
+<span class="sourceLineNo">624</span>        // Open connection to zookeeper and set primary watcher<a name="line.624"></a>
+<span class="sourceLineNo">625</span>        zooKeeper = new ZKWatcher(conf, getProcessName() + ":" +<a name="line.625"></a>
+<span class="sourceLineNo">626</span>          rpcServices.isa.getPort(), this, canCreateBaseZNode());<a name="line.626"></a>
+<span class="sourceLineNo">627</span>        // If no master in cluster, skip trying to track one or look for a cluster status.<a name="line.627"></a>
+<span class="sourceLineNo">628</span>        if (!this.masterless) {<a name="line.628"></a>
+<span class="sourceLineNo">629</span>          this.csm = new ZkCoordinatedStateManager(this);<a name="line.629"></a>
+<span class="sourceLineNo">630</span><a name="line.630"></a>
+<span class="sourceLineNo">631</span>          masterAddressTracker = new MasterAddressTracker(getZooKeeper(), this);<a name="line.631"></a>
+<span class="sourceLineNo">632</span>          masterAddressTracker.start();<a name="line.632"></a>
+<span class="sourceLineNo">633</span><a name="line.633"></a>
+<span class="sourceLineNo">634</span>          clusterStatusTracker = new ClusterStatusTracker(zooKeeper, this);<a name="line.634"></a>
+<span class="sourceLineNo">635</span>          clusterStatusTracker.start();<a name="line.635"></a>
+<span class="sourceLineNo">636</span>        } else {<a name="line.636"></a>
+<span class="sourceLineNo">637</span>          masterAddressTracker = null;<a name="line.637"></a>
+<span class="sourceLineNo">638</span>          clusterStatusTracker = null;<a name="line.638"></a>
+<span class="sourceLineNo">639</span>        }<a name="line.639"></a>
+<span class="sourceLineNo">640</span>      } else {<a name="line.640"></a>
+<span class="sourceLineNo">641</span>        zooKeeper = null;<a name="line.641"></a>
+<span class="sourceLineNo">642</span>        masterAddressTracker = null;<a name="line.642"></a>
+<span class="sourceLineNo">643</span>        clusterStatusTracker = null;<a name="line.643"></a>
+<span class="sourceLineNo">644</span>      }<a name="line.644"></a>
+<span class="sourceLineNo">645</span>      this.rpcServices.start(zooKeeper);<a name="line.645"></a>
+<span class="sourceLineNo">646</span>      // This violates 'no starting stuff in Constructor' but Master depends on the below chore<a name="line.646"></a>
+<span class="sourceLineNo">647</span>      // and executor being created and takes a different startup route. Lots of overlap between HRS<a name="line.647"></a>
+<span class="sourceLineNo">648</span>      // and M (An M IS A HRS now). Need to refactor so less duplication between M and its super<a name="line.648"></a>
+<span class="sourceLineNo">649</span>      // Master expects Constructor to put up web servers. Ugh.<a name="line.649"></a>
+<span class="sourceLineNo">650</span>      // class HRS. TODO.<a name="line.650"></a>
+<span class="sourceLineNo">651</span>      this.choreService = new ChoreService(getName(), true);<a name="line.651"></a>
+<span class="sourceLineNo">652</span>      this.executorService = new ExecutorService(getName());<a name="line.652"></a>
+<span class="sourceLineNo">653</span>      putUpWebUI();<a name="line.653"></a>
+<span class="sourceLineNo">654</span>    } catch (Throwable t) {<a name="line.654"></a>
+<span class="sourceLineNo">655</span>      // Make sure we log the exception. HRegionServer is often started via reflection and the<a name="line.655"></a>
+<span class="sourceLineNo">656</span>      // cause of failed startup is lost.<a name="line.656"></a>
+<span class="sourceLineNo">657</span>      LOG.error("Failed construction RegionServer", t);<a name="line.657"></a>
+<span class="sourceLineNo">658</span>      throw t;<a name="line.658"></a>
+<span class="sourceLineNo">659</span>    }<a name="line.659"></a>
+<span class="sourceLineNo">660</span>  }<a name="line.660"></a>
+<span class="sourceLineNo">661</span><a name="line.661"></a>
+<span class="sourceLineNo">662</span>  // HMaster should override this method to load the specific config for master<a name="line.662"></a>
+<span class="sourceLineNo">663</span>  protected String getUseThisHostnameInstead(Configuration conf) throws IOException {<a name="line.663"></a>
+<span class="sourceLineNo">664</span>    String hostname = conf.get(RS_HOSTNAME_KEY);<a name="line.664"></a>
+<span class="sourceLineNo">665</span>    if (conf.getBoolean(RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY, false)) {<a name="line.665"></a>
+<span class="sourceLineNo">666</span>      if (!StringUtils.isBlank(hostname)) {<a name="line.666"></a>
+<span class="sourceLineNo">667</span>        String msg = RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY + " and " + RS_HOSTNAME_KEY +<a name="line.667"></a>
+<span class="sourceLineNo">668</span>          " are mutually exclusive. Do not set " + RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY +<a name="line.668"></a>
+<span class="sourceLineNo">669</span>          " to true while " + RS_HOSTNAME_KEY + " is used";<a name="line.669"></a>
+<span class="sourceLineNo">670</span>        throw new IOException(msg);<a name="line.670"></a>
+<span class="sourceLineNo">671</span>      } else {<a name="line.671"></a>
+<span class="sourceLineNo">672</span>        return rpcServices.isa.getHostName();<a name="line.672"></a>
+<span class="sourceLineNo">673</span>      }<a name="line.673"></a>
+<span class="sourceLineNo">674</span>    } else {<a name="line.674"></a>
+<span class="sourceLineNo">675</span>      return hostname;<a name="line.675"></a>
+<span class="sourceLineNo">676</span>    }<a name="line.676"></a>
+<span class="sourceLineNo">677</span>  }<a name="line.677"></a>
+<span class="sourceLineNo">678</span><a name="line.678"></a>
+<span class="sourceLineNo">679</span>  /**<a name="line.679"></a>
+<span class="sourceLineNo">680</span>   * If running on Windows, do windows-specific setup.<a name="line.680"></a>
+<span class="sourceLineNo">681</span>   */<a name="line.681"></a>
+<span class="sourceLineNo">682</span>  private static void setupWindows(final Configuration conf, ConfigurationManager cm) {<a name="line.682"></a>
+<span class="sourceLineNo">683</span>    if (!SystemUtils.IS_OS_WINDOWS) {<a name="line.683"></a>
+<span class="sourceLineNo">684</span>      Signal.handle(new Signal("HUP"), new SignalHandler() {<a name="line.684"></a>
+<span class="sourceLineNo">685</span>        @Override<a name="line.685"></a>
+<span class="sourceLineNo">686</span>        public void handle(Signal signal) {<a name="line.686"></a>
+<span class="sourceLineNo">687</span>          conf.reloadConfiguration();<a name="line.687"></a>
+<span class="sourceLineNo">688</span>          cm.notifyAllObservers(conf);<a name="line.688"></a>
+<span class="sourceLineNo">689</span>        }<a name="line.689"></a>
+<span class="sourceLineNo">690</span>      });<a name="line.690"></a>
+<span class="sourceLineNo">691</span>    }<a name="line.691"></a>
+<span class="sourceLineNo">692</span>  }<a name="line.692"></a>
+<span class="sourceLineNo">693</span><a name="line.693"></a>
+<span class="sourceLineNo">694</span>  private static NettyEventLoopGroupConfig setupNetty(Configuration conf) {<a name="line.694"></a>
+<span class="sourceLineNo">695</span>    // Initialize netty event loop group at start as we may use it for rpc server, rpc client &amp; WAL.<a name="line.695"></a>
+<span class="sourceLineNo">696</span>    NettyEventLoopGroupConfig nelgc =<a name="line.696"></a>
+<span class="sourceLineNo">697</span>      new NettyEventLoopGroupConfig(conf, "RS-EventLoopGroup");<a name="line.697"></a>
+<span class="sourceLineNo">698</span>    NettyRpcClientConfigHelper.setEventLoopConfig(conf, nelgc.group(), nelgc.clientChannelClass());<a name="line.698"></a>
+<span class="sourceLineNo">699</span>    NettyAsyncFSWALConfigHelper.setEventLoopConfig(conf, nelgc.group(), nelgc.clientChannelClass());<a name="line.699"></a>
+<span class="sourceLineNo">700</span>    return nelgc;<a name="line.700"></a>
+<span class="sourceLineNo">701</span>  }<a name="line.701"></a>
+<span class="sourceLineNo">702</span><a name="line.702"></a>
+<span class="sourceLineNo">703</span>  private void initializeFileSystem() throws IOException {<a name="line.703"></a>
+<span class="sourceLineNo">704</span>    // Get fs instance used by this RS.  Do we use checksum verification in the hbase? If hbase<a name="line.704"></a>
+<span class="sourceLineNo">705</span>    // checksum verification enabled, then automatically switch off hdfs checksum verification.<a name="line.705"></a>
+<span class="sourceLineNo">706</span>    boolean useHBaseChecksum = conf.getBoolean(HConstants.HBASE_CHECKSUM_VERIFICATION, true);<a name="line.706"></a>
+<span class="sourceLineNo">707</span>    FSUtils.setFsDefault(this.conf, FSUtils.getWALRootDir(this.conf));<a name="line.707"></a>
+<span class="sourceLineNo">708</span>    this.walFs = new HFileSystem(this.conf, useHBaseChecksum);<a name="line.708"></a>
+<span class="sourceLineNo">709</span>    this.walRootDir = FSUtils.getWALRootDir(this.conf);<a name="line.709"></a>
+<span class="sourceLineNo">710</span>    // Set 'fs.defaultFS' to match the filesystem on hbase.rootdir else<a name="line.710"></a>
+<span class="sourceLineNo">711</span>    // underlying hadoop hdfs accessors will be going against wrong filesystem<a name="line.711"></a>
+<span class="sourceLineNo">712</span>    // (unless all is set to defaults).<a name="line.712"></a>
+<span class="sourceLineNo">713</span>    FSUtils.setFsDefault(this.conf, FSUtils.getRootDir(this.conf));<a name="line.713"></a>
+<span class="sourceLineNo">714</span>    this.fs = new HFileSystem(this.conf, useHBaseChecksum);<a name="line.714"></a>
+<span class="sourceLineNo">715</span>    this.rootDir = FSUtils.getRootDir(this.conf);<a name="line.715"></a>
+<span class="sourceLineNo">716</span>    this.tableDescriptors = getFsTableDescriptors();<a name="line.716"></a>
 <span class="sourceLineNo">717</span>  }<a name="line.717"></a>
 <span class="sourceLineNo">718</span><a name="line.718"></a>
-<span class="sourceLineNo">719</span>  protected Function&lt;TableDescriptorBuilder, TableDescriptorBuilder&gt; getMetaTableObserver() {<a name="line.719"></a>
-<span class="sourceLineNo">720</span>    return null;<a name="line.720"></a>
-<span class="sourceLineNo">721</span>  }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>  protected void login(UserProvider user, String host) throws IOException {<a name="line.723"></a>
-<span class="sourceLineNo">724</span>    user.login("hbase.regionserver.keytab.file",<a name="line.724"></a>
-<span class="sourceLineNo">725</span>      "hbase.regionserver.kerberos.principal", host);<a name="line.725"></a>
+<span class="sourceLineNo">719</span>  protected TableDescriptors getFsTableDescriptors() throws IOException {<a name="line.719"></a>
+<span class="sourceLineNo">720</span>    return new FSTableDescriptors(this.conf,<a name="line.720"></a>
+<span class="sourceLineNo">721</span>      this.fs, this.rootDir, !canUpdateTableDescriptor(), false, getMetaTableObserver());<a name="line.721"></a>
+<span class="sourceLineNo">722</span>  }<a name="line.722"></a>
+<span class="sourceLineNo">723</span><a name="line.723"></a>
+<span class="sourceLineNo">724</span>  protected Function&lt;TableDescriptorBuilder, TableDescriptorBuilder&gt; getMetaTableObserver() {<a name="line.724"></a>
+<span class="sourceLineNo">725</span>    return null;<a name="line.725"></a>
 <span class="sourceLineNo">726</span>  }<a name="line.726"></a>
 <span class="sourceLineNo">727</span><a name="line.727"></a>
-<span class="sourceLineNo">728</span><a name="line.728"></a>
-<span class="sourceLineNo">729</span>  /**<a name="line.729"></a>
-<span class="sourceLineNo">730</span>   * Wait for an active Master.<a name="line.730"></a>
-<span class="sourceLineNo">731</span>   * See override in Master superclass for how it is used.<a name="line.731"></a>
-<span class="sourceLineNo">732</span>   */<a name="line.732"></a>
-<span class="sourceLineNo">733</span>  protected void waitForMasterActive() {}<a name="line.733"></a>
-<span class="sourceLineNo">734</span><a name="line.734"></a>
-<span class="sourceLineNo">735</span>  protected String getProcessName() {<a name="line.735"></a>
-<span class="sourceLineNo">736</span>    return REGIONSERVER;<a name="line.736"></a>
-<span class="sourceLineNo">737</span>  }<a name="line.737"></a>
-<span class="sourceLineNo">738</span><a name="line.738"></a>
-<span class="sourceLineNo">739</span>  protected boolean canCreateBaseZNode() {<a name="line.739"></a>
-<span class="sourceLineNo">740</span>    return this.masterless;<a name="line.740"></a>
-<span class="sourceLineNo">741</span>  }<a name="line.741"></a>
-<span class="sourceLineNo">742</span><a name="line.742"></a>
-<span class="sourceLineNo">743</span>  protected boolean canUpdateTableDescriptor() {<a name="line.743"></a>
-<span class="sourceLineNo">744</span>    return false;<a name="line.744"></a>
-<span class="sourceLineNo">745</span>  }<a name="line.745"></a>
-<span class="sourceLineNo">746</span><a name="line.746"></a>
-<span class="sourceLineNo">747</span>  protected RSRpcServices createRpcServices() throws IOException {<a name="line.747"></a>
-<span class="sourceLineNo">748</span>    return new RSRpcServices(this);<a name="line.748"></a>
-<span class="sourceLineNo">749</span>  }<a name="line.749"></a>
-<span class="sourceLineNo">750</span><a name="line.750"></a>
-<span class="sourceLineNo">751</span>  protected void configureInfoServer() {<a name="line.751"></a>
-<span class="sourceLineNo">752</span>    infoServer.addServlet("rs-status", "/rs-status", RSStatusServlet.class);<a name="line.752"></a>
-<span class="sourceLineNo">753</span>    infoServer.setAttribute(REGIONSERVER, this);<a name="line.753"></a>
+<span class="sourceLineNo">728</span>  protected void login(UserProvider user, String host) throws IOException {<a name="line.728"></a>
+<span class="sourceLineNo">729</span>    user.login("hbase.regionserver.keytab.file",<a name="line.729"></a>
+<span class="sourceLineNo">730</span>      "hbase.regionserver.kerberos.principal", host);<a name="line.730"></a>
+<span class="sourceLineNo">731</span>  }<a name="line.731"></a>
+<span class="sourceLineNo">732</span><a name="line.732"></a>
+<span class="sourceLineNo">733</span><a name="line.733"></a>
+<span class="sourceLineNo">734</span>  /**<a name="line.734"></a>
+<span class="sourceLineNo">735</span>   * Wait for an active Master.<a name="line.735"></a>
+<span class="sourceLineNo">736</span>   * See override in Master superclass for how it is used.<a name="line.736"></a>
+<span class="sourceLineNo">737</span>   */<a name="line.737"></a>
+<span class="sourceLineNo">738</span>  protected void waitForMasterActive() {}<a name="line.738"></a>
+<span class="sourceLineNo">739</span><a name="line.739"></a>
+<span class="sourceLineNo">740</span>  protected String getProcessName() {<a name="line.740"></a>
+<span class="sourceLineNo">741</span>    return REGIONSERVER;<a name="line.741"></a>
+<span class="sourceLineNo">742</span>  }<a name="line.742"></a>
+<span class="sourceLineNo">743</span><a name="line.743"></a>
+<span class="sourceLineNo">744</span>  protected boolean canCreateBaseZNode() {<a name="line.744"></a>
+<span class="sourceLineNo">745</span>    return this.masterless;<a name="line.745"></a>
+<span class="sourceLineNo">746</span>  }<a name="line.746"></a>
+<span class="sourceLineNo">747</span><a name="line.747"></a>
+<span class="sourceLineNo">748</span>  protected boolean canUpdateTableDescriptor() {<a name="line.748"></a>
+<span class="sourceLineNo">749</span>    return false;<a name="line.749"></a>
+<span class="sourceLineNo">750</span>  }<a name="line.750"></a>
+<span class="sourceLineNo">751</span><a name="line.751"></a>
+<span class="sourceLineNo">752</span>  protected RSRpcServices createRpcServices() throws IOException {<a name="line.752"></a>
+<span class="sourceLineNo">753</span>    return new RSRpcServices(this);<a name="line.753"></a>
 <span class="sourceLineNo">754</span>  }<a name="line.754"></a>
 <span class="sourceLineNo">755</span><a name="line.755"></a>
-<span class="sourceLineNo">756</span>  protected Class&lt;? extends HttpServlet&gt; getDumpServlet() {<a name="line.756"></a>
-<span class="sourceLineNo">757</span>    return RSDumpServlet.class;<a name="line.757"></a>
-<span class="sourceLineNo">758</span>  }<a name="line.758"></a>
-<span class="sourceLineNo">759</span><a name="line.759"></a>
-<span class="sourceLineNo">760</span>  @Override<a name="line.760"></a>
-<span class="sourceLineNo">761</span>  public boolean registerService(com.google.protobuf.Service instance) {<a name="line.761"></a>
-<span class="sourceLineNo">762</span>    /*<a name="line.762"></a>
-<span class="sourceLineNo">763</span>     * No stacking of instances is allowed for a single executorService name<a name="line.763"></a>
-<span class="sourceLineNo">764</span>     */<a name="line.764"></a>
-<span class="sourceLineNo">765</span>    com.google.protobuf.Descriptors.ServiceDescriptor serviceDesc =<a name="line.765"></a>
-<span class="sourceLineNo">766</span>        instance.getDescriptorForType();<a name="line.766"></a>
-<span class="sourceLineNo">767</span>    String serviceName = CoprocessorRpcUtils.getServiceName(serviceDesc);<a name="line.767"></a>
-<span class="sourceLineNo">768</span>    if (coprocessorServiceHandlers.containsKey(serviceName)) {<a name="line.768"></a>
-<span class="sourceLineNo">769</span>      LOG.error("Coprocessor executorService " + serviceName<a name="line.769"></a>
-<span class="sourceLineNo">770</span>          + " already registered, rejecting request from " + instance);<a name="line.770"></a>
-<span class="sourceLineNo">771</span>      return false;<a name="line.771"></a>
-<span class="sourceLineNo">772</span>    }<a name="line.772"></a>
-<span class="sourceLineNo">773</span><a name="line.773"></a>
-<span class="sourceLineNo">774</span>    coprocessorServiceHandlers.put(serviceName, instance);<a name="line.774"></a>
-<span class="sourceLineNo">775</span>    if (LOG.isDebugEnabled()) {<a name="line.775"></a>
-<span class="sourceLineNo">776</span>      LOG.debug("Registered regionserver coprocessor executorService: executorService=" + serviceName);<a name="line.776"></a>
+<span class="sourceLineNo">756</span>  protected void configureInfoServer() {<a name="line.756"></a>
+<span class="sourceLineNo">757</span>    infoServer.addServlet("rs-status", "/rs-status", RSStatusServlet.class);<a name="line.757"></a>
+<span class="sourceLineNo">758</span>    infoServer.setAttribute(REGIONSERVER, this);<a name="line.758"></a>
+<span class="sourceLineNo">759</span>  }<a name="line.759"></a>
+<span class="sourceLineNo">760</span><a name="line.760"></a>
+<span class="sourceLineNo">761</span>  protected Class&lt;? extends HttpServlet&gt; getDumpServlet() {<a name="line.761"></a>
+<span class="sourceLineNo">762</span>    return RSDumpServlet.class;<a name="line.762"></a>
+<span class="sourceLineNo">763</span>  }<a name="line.763"></a>
+<span class="sourceLineNo">764</span><a name="line.764"></a>
+<span class="sourceLineNo">765</span>  @Override<a name="line.765"></a>
+<span class="sourceLineNo">766</span>  public boolean registerService(com.google.protobuf.Service instance) {<a name="line.766"></a>
+<span class="sourceLineNo">767</span>    /*<a name="line.767"></a>
+<span class="sourceLineNo">768</span>     * No stacking of instances is allowed for a single executorService name<a name="line.768"></a>
+<span class="sourceLineNo">769</span>     */<a name="line.769"></a>
+<span class="sourceLineNo">770</span>    com.google.protobuf.Descriptors.ServiceDescriptor serviceDesc =<a name="line.770"></a>
+<span class="sourceLineNo">771</span>        instance.getDescriptorForType();<a name="line.771"></a>
+<span class="sourceLineNo">772</span>    String serviceName = CoprocessorRpcUtils.getServiceName(serviceDesc);<a name="line.772"></a>
+<span class="sourceLineNo">773</span>    if (coprocessorServiceHandlers.containsKey(serviceName)) {<a name="line.773"></a>
+<span class="sourceLineNo">774</span>      LOG.error("Coprocessor executorService " + serviceName<a name="line.774"></a>
+<span class="sourceLineNo">775</span>          + " already registered, rejecting request from " + instance);<a name="line.775"></a>
+<span class="sourceLineNo">776</span>      return false;<a name="line.776"></a>
 <span class="sourceLineNo">777</span>    }<a name="line.777"></a>
-<span class="sourceLineNo">778</span>    return true;<a name="line.778"></a>
-<span class="sourceLineNo">779</span>  }<a name="line.779"></a>
-<span class="sourceLineNo">780</span><a name="line.780"></a>
-<span class="sourceLineNo">781</span>  /**<a name="line.781"></a>
-<span class="sourceLineNo">782</span>   * Create a 'smarter' Connection, one that is capable of by-passing RPC if the request is to the<a name="line.782"></a>
-<span class="sourceLineNo">783</span>   * local server; i.e. a short-circuit Connection. Safe to use going to local or remote server.<a name="line.783"></a>
-<span class="sourceLineNo">784</span>   */<a name="line.784"></a>
-<span class="sourceLineNo">785</span>  private ClusterConnection createClusterConnection() throws IOException {<a name="line.785"></a>
-<span class="sourceLineNo">786</span>    Configuration conf = this.conf;<a name="line.786"></a>
-<span class="sourceLineNo">787</span>    if (conf.get(HConstants.CLIENT_ZOOKEEPER_QUORUM) != null) {<a name="line.787"></a>
-<span class="sourceLineNo">788</span>      // Use server ZK cluster for server-issued connections, so we clone<a name="line.788"></a>
-<span class="sourceLineNo">789</span>      // the conf and unset the client ZK related properties<a name="line.789"></a>
-<span class="sourceLineNo">790</span>      conf = new Configuration(this.conf);<a name="line.790"></a>
-<span class="sourceLineNo">791</span>      conf.unset(HConstants.CLIENT_ZOOKEEPER_QUORUM);<a name="line.791"></a>
-<span class="sourceLineNo">792</span>    }<a name="line.792"></a>
-<span class="sourceLineNo">793</span>    // Create a cluster connection that when appropriate, can short-circuit and go directly to the<a name="line.793"></a>
-<span class="sourceLineNo">794</span>    // local server if the request is to the local server bypassing RPC. Can be used for both local<a name="line.794"></a>
-<span class="sourceLineNo">795</span>    // and remote invocations.<a name="line.795"></a>
-<span class="sourceLineNo">796</span>    ClusterConnection conn = ConnectionUtils.createShortCircuitConnection(conf, null,<a name="line.796"></a>
-<span class="sourceLineNo">797</span>      userProvider.getCurrent(), serverName, rpcServices, rpcServices);<a name="line.797"></a>
-<span class="sourceLineNo">798</span>    // This is used to initialize the batch thread pool inside the connection implementation.<a name="line.798"></a>
-<span class="sourceLineNo">799</span>    // When deploy a fresh cluster, we may first use the cluster connection in InitMetaProcedure,<a name="line.799"></a>
-<span class="sourceLineNo">800</span>    // which will be executed inside the PEWorker, and then the batch thread pool will inherit the<a name="line.800"></a>
-<span class="sourceLineNo">801</span>    // thread group of PEWorker, which will be destroy when shutting down the ProcedureExecutor. It<a name="line.801"></a>
-<span class="sourceLineNo">802</span>    // will cause lots of procedure related UTs to fail, so here let's initialize it first, no harm.<a name="line.802"></a>
-<span class="sourceLineNo">803</span>    conn.getTable(TableName.META_TABLE_NAME).close();<a name="line.803"></a>
-<span class="sourceLineNo">804</span>    return conn;<a name="line.804"></a>
-<span class="sourceLineNo">805</span>  }<a name="line.805"></a>
-<span class="sourceLineNo">806</span><a name="line.806"></a>
-<span class="sourceLineNo">807</span>  /**<a name="line.807"></a>
-<span class="sourceLineNo">808</span>   * Run test on configured codecs to make sure supporting libs are in place.<a name="line.808"></a>
-<span class="sourceLineNo">809</span>   * @param c<a name="line.809"></a>
-<span class="sourceLineNo">810</span>   * @throws IOException<a name="line.810"></a>
-<span class="sourceLineNo">811</span>   */<a name="line.811"></a>
-<span class="sourceLineNo">812</span>  private static void checkCodecs(final Configuration c) throws IOException {<a name="line.812"></a>
-<span class="sourceLineNo">813</span>    // check to see if the codec list is available:<a name="line.813"></a>
-<span class="sourceLineNo">814</span>    String [] codecs = c.getStrings("hbase.regionserver.codecs", (String[])null);<a name="line.814"></a>
-<span class="sourceLineNo">815</span>    if (codecs == null) return;<a name="line.815"></a>
-<span class="sourceLineNo">816</span>    for (String codec : codecs) {<a name="line.816"></a>
-<span class="sourceLineNo">817</span>      if (!CompressionTest.testCompression(codec)) {<a name="line.817"></a>
-<span class="sourceLineNo">818</span>        throw new IOException("Compression codec " + codec +<a name="line.818"></a>
-<span class="sourceLineNo">819</span>          " not supported, aborting RS construction");<a name="line.819"></a>
-<span class="sourceLineNo">820</span>      }<a name="line.820"></a>
-<span class="sourceLineNo">821</span>    }<a name="line.821"></a>
-<span class="sourceLineNo">822</span>  }<a name="line.822"></a>
-<span class="sourceLineNo">823</span><a name="line.823"></a>
-<span class="sourceLineNo">824</span>  public String getClusterId() {<a name="line.824"></a>
-<span class="sourceLineNo">825</span>    return this.clusterId;<a name="line.825"></a>
-<span class="sourceLineNo">826</span>  }<a name="line.826"></a>
-<span class="sourceLineNo">827</span><a name="line.827"></a>
-<span class="sourceLineNo">828</span>  /**<a name="line.828"></a>
-<span class="sourceLineNo">829</span>   * Setup our cluster connection if not already initialized.<a name="line.829"></a>
-<span class="sourceLineNo">830</span>   */<a name="line.830"></a>
-<span class="sourceLineNo">831</span>  protected synchronized void setupClusterConnection() throws IOException {<a name="line.831"></a>
-<span class="sourceLineNo">832</span>    if (clusterConnection == null) {<a name="line.832"></a>
-<span class="sourceLineNo">833</span>      clusterConnection = createClusterConnection();<a name="line.833"></a>
-<span class="sourceLineNo">834</span>      metaTableLocator = new MetaTableLocator();<a name="line.834"></a>
-<span class="sourceLineNo">835</span>    }<a name="line.835"></a>
-<span class="sourceLineNo">836</span>  }<a name="line.836"></a>
-<span class="sourceLineNo">837</span><a name="line.837"></a>
-<span class="sourceLineNo">838</span>  /**<a name="line.838"></a>
-<span class="sourceLineNo">839</span>   * All initialization needed before we go register with Master.&lt;br&gt;<a name="line.839"></a>
-<span class="sourceLineNo">840</span>   * Do bare minimum. Do bulk of initializations AFTER we've connected to the Master.&lt;br&gt;<a name="line.840"></a>
-<span class="sourceLineNo">841</span>   * In here we just put up the RpcServer, setup Connection, and ZooKeeper.<a name="line.841"></a>
-<span class="sourceLineNo">842</span>   */<a name="line.842"></a>
-<span class="sourceLineNo">843</span>  private void preRegistrationInitialization() {<a name="line.843"></a>
-<span class="sourceLineNo">844</span>    try {<a name="line.844"></a>
-<span class="sourceLineNo">845</span>      initializeZooKeeper();<a name="line.845"></a>
-<span class="sourceLineNo">846</span>      setupClusterConnection();<a name="line.846"></a>
-<span class="sourceLineNo">847</span>      // Setup RPC client for master communication<a name="line.847"></a>
-<span class="sourceLineNo">848</span>      this.rpcClient = RpcClientFactory.createClient(conf, clusterId, new InetSocketAddress(<a name="line.848"></a>
-<span class="sourceLineNo">849</span>          this.rpcServices.isa.getAddress(), 0), clusterConnection.getConnectionMetrics());<a name="line.849"></a>
-<span class="sourceLineNo">850</span>    } catch (Throwable t) {<a name="line.850"></a>
-<span class="sourceLineNo">851</span>      // Call stop if error or process will stick around for ever since server<a name="line.851"></a>
-<span class="sourceLineNo">852</span>      // puts up non-daemon threads.<a name="line.852"></a>
-<span class="sourceLineNo">853</span>      this.rpcServices.stop();<a name="line.853"></a>
-<span class="sourceLineNo">854</span>      abort("Initialization of RS failed.  Hence aborting RS.", t);<a name="line.854"></a>
-<span class="sourceLineNo">855</span>    }<a name="line.855"></a>
-<span class="sourceLineNo">856</span>  }<a name="line.856"></a>
-<span class="sourceLineNo">857</span><a name="line.857"></a>
-<span class="sourceLineNo">858</span>  /**<a name="line.858"></a>
-<span class="sourceLineNo">859</span>   * Bring up connection to zk ensemble and then wait until a master for this cluster and then after<a name="line.859"></a>
-<span class="sourceLineNo">860</span>   * that, wait until cluster 'up' flag has been set. This is the order in which master does things.<a name="line.860"></a>
-<span class="sourceLineNo">861</span>   * &lt;p&gt;<a name="line.861"></a>
-<span class="sourceLineNo">862</span>   * Finally open long-living server short-circuit connection.<a name="line.862"></a>
-<span class="sourceLineNo">863</span>   */<a name="line.863"></a>
-<span class="sourceLineNo">864</span>  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="RV_RETURN_VALUE_IGNORED_BAD_PRACTICE",<a name="line.864"></a>
-<span class="sourceLineNo">865</span>    justification="cluster Id znode read would give us correct response")<a name="line.865"></a>
-<span class="sourceLineNo">866</span>  private void initializeZooKeeper() throws IOException, InterruptedException {<a name="line.866"></a>
-<span class="sourceLineNo">867</span>    // Nothing to do in here if no Master in the mix.<a name="line.867"></a>
-<span class="sourceLineNo">868</span>    if (this.masterless) {<a name="line.868"></a>
-<span class="sourceLineNo">869</span>      return;<a name="line.869"></a>
-<span class="sourceLineNo">870</span>    }<a name="line.870"></a>
-<span class="sourceLineNo">871</span><a name="line.871"></a>
-<span class="sourceLineNo">872</span>    // Create the master address tracker, register with zk, and start it.  Then<a name="line.872"></a>
-<span class="sourceLineNo">873</span>    // block until a master is available.  No point in starting up if no master<a name="line.873"></a>
-<span class="sourceLineNo">874</span>    // running.<a name="line.874"></a>
-<span class="sourceLineNo">875</span>    blockAndCheckIfStopped(this.masterAddressTracker);<a name="line.875"></a>
+<span class="sourceLineNo">778</span><a name="line.778"></a>
+<span class="sourceLineNo">779</span>    coprocessorServiceHandlers.put(serviceName, instance);<a name="line.779"></a>
+<span class="sourceLineNo">780</span>    if (LOG.isDebugEnabled()) {<a name="line.780"></a>
+<span class="sourceLineNo">781</span>      LOG.debug("Registered regionserver coprocessor executorService: executorService=" + serviceName);<a name="line.781"></a>
+<span class="sourceLineNo">782</span>    }<a name="line.782"></a>
+<span class="sourceLineNo">783</span>    return true;<a name="line.783"></a>
+<span class="sourceLineNo">784</span>  }<a name="line.784"></a>
+<span class="sourceLineNo">785</span><a name="line.785"></a>
+<span class="sourceLineNo">786</span>  /**<a name="line.786"></a>
+<span class="sourceLineNo">787</span>   * Create a 'smarter' Connection, one that is capable of by-passing RPC if the request is to the<a name="line.787"></a>
+<span class="sourceLineNo">788</span>   * local server; i.e. a short-circuit Connection. Safe to use going to local or remote server.<a name="line.788"></a>
+<span class="sourceLineNo">789</span>   */<a name="line.789"></a>
+<span class="sourceLineNo">790</span>  private ClusterConnection createClusterConnection() throws IOException {<a name="line.790"></a>
+<span class="sourceLineNo">791</span>    Configuration conf = this.conf;<a name="line.791"></a>
+<span class="sourceLineNo">792</span>    if (conf.get(HConstants.CLIENT_ZOOKEEPER_QUORUM) != null) {<a name="line.792"></a>
+<span class="sourceLineNo">793</span>      // Use server ZK cluster for server-issued connections, so we clone<a name="line.793"></a>
+<span class="sourceLineNo">794</span>      // the conf and unset the client ZK related properties<a name="line.794"></a>
+<span class="sourceLineNo">795</span>      conf = new Configuration(this.conf);<a name="line.795"></a>
+<span class="sourceLineNo">796</span>      conf.unset(HConstants.CLIENT_ZOOKEEPER_QUORUM);<a name="line.796"></a>
+<span class="sourceLineNo">797</span>    }<a name="line.797"></a>
+<span class="sourceLineNo">798</span>    // Create a cluster connection that when appropriate, can short-circuit and go directly to the<a name="line.798"></a>
+<span class="sourceLineNo">799</span>    // local server if the request is to the local server bypassing RPC. Can be used for both local<a name="line.799"></a>
+<span class="sourceLineNo">800</span>    // and remote invocations.<a name="line.800"></a>
+<span class="sourceLineNo">801</span>    ClusterConnection conn = ConnectionUtils.createShortCircuitConnection(conf, null,<a name="line.801"></a>
+<span class="sourceLineNo">802</span>      userProvider.getCurrent(), serverName, rpcServices, rpcServices);<a name="line.802"></a>
+<span class="sourceLineNo">803</span>    // This is used to initialize the batch thread pool inside the connection implementation.<a name="line.803"></a>
+<span class="sourceLineNo">804</span>    // When deploy a fresh cluster, we may first use the cluster connection in InitMetaProcedure,<a name="line.804"></a>
+<span class="sourceLineNo">805</span>    // which will be executed inside the PEWorker, and then the batch thread pool will inherit the<a name="line.805"></a>
+<span class="sourceLineNo">806</span>    // thread group of PEWorker, which will be destroy when shutting down the ProcedureExecutor. It<a name="line.806"></a>
+<span class="sourceLineNo">807</span>    // will cause lots of procedure related UTs to fail, so here let's initialize it first, no harm.<a name="line.807"></a>
+<span class="sourceLineNo">808</span>    conn.getTable(TableName.META_TABLE_NAME).close();<a name="line.808"></a>
+<span class="sourceLineNo">809</span>    return conn;<a name="line.809"></a>
+<span class="sourceLineNo">810</span>  }<a name="line.810"></a>
+<span class="sourceLineNo">811</span><a name="line.811"></a>
+<span class="sourceLineNo">812</span>  /**<a name="line.812"></a>
+<span class="sourceLineNo">813</span>   * Run test on configured codecs to make sure supporting libs are in place.<a name="line.813"></a>
+<span class="sourceLineNo">814</span>   * @param c<a name="line.814"></a>
+<span class="sourceLineNo">815</span>   * @throws IOException<a name="line.815"></a>
+<span class="sourceLineNo">816</span>   */<a name="line.816"></a>
+<span class="sourceLineNo">817</span>  private static void checkCodecs(final Configuration c) throws IOException {<a name="line.817"></a>
+<span class="sourceLineNo">818</span>    // check to see if the codec list is available:<a name="line.818"></a>
+<span class="sourceLineNo">819</span>    String [] codecs = c.getStrings("hbase.regionserver.codecs", (String[])null);<a name="line.819"></a>
+<span class="sourceLineNo">820</span>    if (codecs == null) return;<a name="line.820"></a>
+<span class="sourceLineNo">821</span>    for (String codec : codecs) {<a name="line.821"></a>
+<span class="sourceLineNo">822</span>      if (!CompressionTest.testCompression(codec)) {<a name="line.822"></a>
+<span class="sourceLineNo">823</span>        throw new IOException("Compression codec " + codec +<a name="line.823"></a>
+<span class="sourceLineNo">824</span>          " not supported, aborting RS construction");<a name="line.824"></a>
+<span class="sourceLineNo">825</span>      }<a name="line.825"></a>
+<span class="sourceLineNo">826</span>    }<a name="line.826"></a>
+<span class="sourceLineNo">827</span>  }<a name="line.827"></a>
+<span class="sourceLineNo">828</span><a name="line.828"></a>
+<span class="sourceLineNo">829</span>  public String getClusterId() {<a name="line.829"></a>
+<span class="sourceLineNo">830</span>    return this.clusterId;<a name="line.830"></a>
+<span class="sourceLineNo">831</span>  }<a name="line.831"></a>
+<span class="sourceLineNo">832</span><a name="line.832"></a>
+<span class="sourceLineNo">833</span>  /**<a name="line.833"></a>
+<span class="sourceLineNo">834</span>   * Setup our cluster connection if not already initialized.<a name="line.834"></a>
+<span class="sourceLineNo">835</span>   */<a name="line.835"></a>
+<span class="sourceLineNo">836</span>  protected synchronized void setupClusterConnection() throws IOException {<a name="line.836"></a>
+<span class="sourceLineNo">837</span>    if (clusterConnection == null) {<a name="line.837"></a>
+<span class="sourceLineNo">838</span>      clusterConnection = createClusterConnection();<a name="line.838"></a>
+<span class="sourceLineNo">839</span>      metaTableLocator = new MetaTableLocator();<a name="line.839"></a>
+<span class="sourceLineNo">840</span>    }<a name="line.840"></a>
+<span class="sourceLineNo">841</span>  }<a name="line.841"></a>
+<span class="sourceLineNo">842</span><a name="line.842"></a>
+<span class="sourceLineNo">843</span>  /**<a name="line.843"></a>
+<span class="sourceLineNo">844</span>   * All initialization needed before we go register with Master.&lt;br&gt;<a name="line.844"></a>
+<span class="sourceLineNo">845</span>   * Do bare minimum. Do bulk of initializations AFTER we've connected to the Master.&lt;br&gt;<a name="line.845"></a>
+<span class="sourceLineNo">846</span>   * In here we just put up the RpcServer, setup Connection, and ZooKeeper.<a name="line.846"></a>
+<span class="sourceLineNo">847</span>   */<a name="line.847"></a>
+<span class="sourceLineNo">848</span>  private void preRegistrationInitialization() {<a name="line.848"></a>
+<span class="sourceLineNo">849</span>    try {<a name="line.849"></a>
+<span class="sourceLineNo">850</span>      initializeZooKeeper();<a name="line.850"></a>
+<span class="sourceLineNo">851</span>      setupClusterConnection();<a name="line.851"></a>
+<span class="sourceLineNo">852</span>      // Setup RPC client for master communication<a name="line.852"></a>
+<span class="sourceLineNo">853</span>      this.rpcClient = RpcClientFactory.createClient(conf, clusterId, new InetSocketAddress(<a name="line.853"></a>
+<span class="sourceLineNo">854</span>          this.rpcServices.isa.getAddress(), 0), clusterConnection.getConnectionMetrics());<a name="line.854"></a>
+<span class="sourceLineNo">855</span>    } catch (Throwable t) {<a name="line.855"></a>
+<span class="sourceLineNo">856</span>      // Call stop if error or process will stick around for ever since server<a name="line.856"></a>
+<span class="sourceLineNo">857</span>      // puts up non-daemon threads.<a name="line.857"></a>
+<span class="sourceLineNo">858</span>      this.rpcServices.stop();<a name="line.858"></a>
+<span class="sourceLineNo">859</span>      abort("Initialization of RS failed.  Hence aborting RS.", t);<a name="line.859"></a>
+<span class="sourceLineNo">860</span>    }<a name="line.860"></a>
+<span class="sourceLineNo">861</span>  }<a name="line.861"></a>
+<span class="sourceLineNo">862</span><a name="line.862"></a>
+<span class="sourceLineNo">863</span>  /**<a name="line.863"></a>
+<span class="sourceLineNo">864</span>   * Bring up connection to zk ensemble and then wait until a master for this cluster and then after<a name="line.864"></a>
+<span class="sourceLineNo">865</span>   * that, wait until cluster 'up' flag has been set. This is the order in which master does things.<a name="line.865"></a>
+<span class="sourceLineNo">866</span>   * &lt;p&gt;<a name="line.866"></a>
+<span class="sourceLineNo">867</span>   * Finally open long-living server short-circuit connection.<a name="line.867"></a>
+<span class="sourceLineNo">868</span>   */<a name="line.868"></a>
+<span class="sourceLineNo">869</span>  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="RV_RETURN_VALUE_IGNORED_BAD_PRACTICE",<a name="line.869"></a>
+<span class="sourceLineNo">870</span>    justification="cluster Id znode read would give us correct response")<a name="line.870"></a>
+<span class="sourceLineNo">871</span>  private void initializeZooKeeper() throws IOException, InterruptedException {<a name="line.871"></a>
+<span class="sourceLineNo">872</span>    // Nothing to do in here if no Master in the mix.<a name="line.872"></a>
+<span class="sourceLineNo">873</span>    if (this.masterless) {<a name="line.873"></a>
+<span class="sourceLineNo">874</span>      return;<a name="line.874"></a>
+<span class="sourceLineNo">875</span>    }<a name="line.875"></a>
 <span class="sourceLineNo">876</span><a name="line.876"></a>
-<span class="sourceLineNo">877</span>    // Wait on cluster being up.  Master will set this flag up in zookeeper<a name="line.877"></a>
-<span class="sourceLineNo">878</span>    // when ready.<a name="line.878"></a>
-<span class="sourceLineNo">879</span>    blockAndCheckIfStopped(this.clusterStatusTracker);<a name="line.879"></a>
-<span class="sourceLineNo">880</span><a name="line.880"></a>
-<span class="sourceLineNo">881</span>    // If we are HMaster then the cluster id should have already been set.<a name="line.881"></a>
-<span class="sourceLineNo">882</span>    if (clusterId == null) {<a name="line.882"></a>
-<span class="sourceLineNo">883</span>      // Retrieve clusterId<a name="line.883"></a>
-<span class="sourceLineNo">884</span>      // Since cluster status is now up<a name="line.884"></a>
-<span class="sourceLineNo">885</span>      // ID should have already been set by HMaster<a name="line.885"></a>
-<span class="sourceLineNo">886</span>      try {<a name="line.886"></a>
-<span class="sourceLineNo">887</span>        clusterId = ZKClusterId.readClusterIdZNode(this.zooKeeper);<a name="line.887"></a>
-<span class="sourceLineNo">888</span>        if (clusterId == null) {<a name="line.888"></a>
-<span class="sourceLineNo">889</span>          this.abort("Cluster ID has not been set");<a name="line.889"></a>
-<span class="sourceLineNo">890</span>        }<a name="line.890"></a>
-<span class="sourceLineNo">891</span>        LOG.info("ClusterId : " + clusterId);<a name="line.891"></a>
-<span class="sourceLineNo">892</span>      } catch (KeeperException e) {<a name="line.892"></a>
-<span class="sourceLineNo">893</span>        this.abort("Failed to retrieve Cluster ID", e);<a name="line.893"></a>
-<span class="sourceLineNo">894</span>      }<a name="line.894"></a>
-<span class="sourceLineNo">895</span>    }<a name="line.895"></a>
-<span class="sourceLineNo">896</span><a name="line.896"></a>
-<span class="sourceLineNo">897</span>    waitForMasterActive();<a name="line.897"></a>
-<span class="sourceLineNo">898</span>    if (isStopped() || isAborted()) {<a name="line.898"></a>
-<span class="sourceLineNo">899</span>      return; // No need for further initialization<a name="line.899"></a>
+<span class="sourceLineNo">877</span>    // Create the master address tracker, register with zk, and start it.  Then<a name="line.877"></a>
+<span class="sourceLineNo">878</span>    // block until a master is available.  No point in starting up if no master<a name="line.878"></a>
+<span class="sourceLineNo">879</span>    // running.<a name="line.879"></a>
+<span class="sourceLineNo">880</span>    blockAndCheckIfStopped(this.masterAddressTracker);<a name="line.880"></a>
+<span class="sourceLineNo">881</span><a name="line.881"></a>
+<span class="sourceLineNo">882</span>    // Wait on cluster being up.  Master will set this flag up in zookeeper<a name="line.882"></a>
+<span class="sourceLineNo">883</span>    // when ready.<a name="line.883"></a>
+<span class="sourceLineNo">884</span>    blockAndCheckIfStopped(this.clusterStatusTracker);<a name="line.884"></a>
+<span class="sourceLineNo">885</span><a name="line.885"></a>
+<span class="sourceLineNo">886</span>    // If we are HMaster then the cluster id should have already been set.<a name="line.886"></a>
+<span class="sourceLineNo">887</span>    if (clusterId == null) {<a name="line.887"></a>
+<span class="sourceLineNo">888</span>      // Retrieve clusterId<a name="line.888"></a>
+<span class="sourceLineNo">889</span>      // Since cluster status is now up<a name="line.889"></a>
+<span class="sourceLineNo">890</span>      // ID should have already been set by HMaster<a name="line.890"></a>
+<span class="sourceLineNo">891</span>      try {<a name="line.891"></a>
+<span class="sourceLineNo">892</span>        clusterId = ZKClusterId.readClusterIdZNode(this.zooKeeper);<a name="line.892"></a>
+<span class="sourceLineNo">893</span>        if (clusterId == null) {<a name="line.893"></a>
+<span class="sourceLineNo">894</span>          this.abort("Cluster ID has not been set");<a name="line.894"></a>
+<span class="sourceLineNo">895</span>        }<a name="line.895"></a>
+<span class="sourceLineNo">896</span>        LOG.info("ClusterId : " + clusterId);<a name="line.896"></a>
+<span class="sourceLineNo">897</span>      } catch (KeeperException e) {<a name="line.897"></a>
+<span class="sourceLineNo">898</span>        this.abort("Failed to retrieve Cluster ID", e);<a name="line.898"></a>
+<span class="sourceLineNo">899</span>      }<a name="line.899"></a>
 <span class="sourceLineNo">900</span>    }<a name="line.900"></a>
 <span class="sourceLineNo">901</span><a name="line.901"></a>
-<span class="sourceLineNo">902</span>    // watch for snapshots and other procedures<a name="line.902"></a>
-<span class="sourceLineNo">903</span>    try {<a name="line.903"></a>
-<span class="sourceLineNo">904</span>      rspmHost = new RegionServerProcedureManagerHost();<a name="line.904"></a>
-<span class="sourceLineNo">905</span>      rspmHost.loadProcedures(conf);<a name="line.905"></a>
-<span class="sourceLineNo">906</span>      rspmHost.initialize(this);<a name="line.906"></a>
-<span class="sourceLineNo">907</span>    } catch (KeeperException e) {<a name="line.907"></a>
-<span class="sourceLineNo">908</span>      this.abort("Failed to reach coordination cluster when creating procedure handler.", e);<a name="line.908"></a>
-<span class="sourceLineNo">909</span>    }<a name="line.909"></a>
-<span class="sourceLineNo">910</span>  }<a name="line.910"></a>
-<span class="sourceLineNo">911</span><a name="line.911"></a>
-<span class="sourceLineNo">912</span>  /**<a name="line.912"></a>
-<span class="sourceLineNo">913</span>   * Utilty method to wait indefinitely on a znode availability while checking<a name="line.913"></a>
-<span class="sourceLineNo">914</span>   * if the region server is shut down<a name="line.914"></a>
-<span class="sourceLineNo">915</span>   * @param tracker znode tracker to use<a name="line.915"></a>
-<span class="sourceLineNo">916</span>   * @throws IOException any IO exception, plus if the RS is stopped<a name="line.916"></a>
-<span class="sourceLineNo">917</span>   * @throws InterruptedException<a name="line.917"></a>
-<span class="sourceLineNo">918</span>   */<a name="line.918"></a>
-<span class="sourceLineNo">919</span>  private void blockAndCheckIfStopped(ZKNodeTracker tracker)<a name="line.919"></a>
-<span class="sourceLineNo">920</span>      throws IOException, InterruptedException {<a name="line.920"></a>
-<span class="sourceLineNo">921</span>    while (tracker.blockUntilAvailable(this.msgInterval, false) == null) {<a name="line.921"></a>
-<span class="sourceLineNo">922</span>      if (this.stopped) {<a name="line.922"></a>
-<span class="sourceLineNo">923</span>        throw new IOException("Received the shutdown message while waiting.");<a name="line.923"></a>
-<span class="sourceLineNo">924</span>      }<a name="line.924"></a>
-<span class="sourceLineNo">925</span>    }<a name="line.925"></a>
-<span class="sourceLineNo">926</span>  }<a name="line.926"></a>
-<span class="sourceLineNo">927</span><a name="line.927"></a>
-<span class="sourceLineNo">928</span>  /**<a name="line.928"></a>
-<span class="sourceLineNo">929</span>   * @return True if the cluster is up.<a name="line.929"></a>
-<span class="sourceLineNo">930</span>   */<a name="line.930"></a>
-<span class="sourceLineNo">931</span>  @Override<a name="line.931"></a>
-<span class="sourceLineNo">932</span>  public boolean isClusterUp() {<a name="line.932"></a>
-<span class="sourceLineNo">933</span>    return this.masterless ||<a name="line.933"></a>
-<span class="sourceLineNo">934</span>        (this.clusterStatusTracker != null &amp;&amp; this.clusterStatusTracker.isClusterUp());<a name="line.934"></a>
-<span class="sourceLineNo">935</span>  }<a name="line.935"></a>
-<span class="sourceLineNo">936</span><a name="line.936"></a>
-<span class="sourceLineNo">937</span>  /**<a name="line.937"></a>
-<span class="sourceLineNo">938</span>   * The HRegionServer sticks in this loop until closed.<a name="line.938"></a>
-<span class="sourceLineNo">939</span>   */<a name="line.939"></a>
-<span class="sourceLineNo">940</span>  @Override<a name="line.940"></a>
-<span class="sourceLineNo">941</span>  public void run() {<a name="line.941"></a>
-<span class="sourceLineNo">942</span>    try {<a name="line.942"></a>
-<span class="sourceLineNo">943</span>      // Do pre-registration initializations; zookeeper, lease threads, etc.<a name="line.943"></a>
-<span class="sourceLineNo">944</span>      preRegistrationInitialization();<a name="line.944"></a>
-<span class="sourceLineNo">945</span>    } catch (Throwable e) {<a name="line.945"></a>
-<span class="sourceLineNo">946</span>      abort("Fatal exception during initialization", e);<a name="line.946"></a>
-<span class="sourceLineNo">947</span>    }<a name="line.947"></a>
-<span class="sourceLineNo">948</span><a name="line.948"></a>
-<span class="sourceLineNo">949</span>    try {<a name="line.949"></a>
-<span class="sourceLineNo">950</span>      if (!isStopped() &amp;&amp; !isAborted()) {<a name="line.950"></a>
-<span class="sourceLineNo">951</span>        ShutdownHook.install(conf, fs, this, Thread.currentThread());<a name="line.951"></a>
-<span class="sourceLineNo">952</span>        // Initialize the RegionServerCoprocessorHost now that our ephemeral<a name="line.952"></a>
-<span class="sourceLineNo">953</span>        // node was created, in case any coprocessors want to use ZooKeeper<a name="line.953"></a>
-<span class="sourceLineNo">954</span>        this.rsHost = new RegionServerCoprocessorHost(this, this.conf);<a name="line.954"></a>
-<span class="sourceLineNo">955</span>      }<a name="line.955"></a>
-<span class="sourceLineNo">956</span><a name="line.956"></a>
-<span class="sourceLineNo">957</span>      // Try and register with the Master; tell it we are here.  Break if server is stopped or the<a name="line.957"></a>
-<span class="sourceLineNo">958</span>      // clusterup flag is down or hdfs went wacky. Once registered successfully, go ahead and start<a name="line.958"></a>
-<span class="sourceLineNo">959</span>      // up all Services. Use RetryCounter to get backoff in case Master is struggling to come up.<a name="line.959"></a>
-<span class="sourceLineNo">960</span>      LOG.debug("About to register with Master.");<a name="line.960"></a>
-<span class="sourceLineNo">961</span>      RetryCounterFactory rcf = new RetryCounterFactory(Integer.MAX_VALUE,<a name="line.961"></a>
-<span class="sourceLineNo">962</span>          this.sleeper.getPeriod(), 1000 * 60 * 5);<a name="line.962"></a>
-<span class="sourceLineNo">963</span>      RetryCounter rc = rcf.create();<a name="line.963"></a>
-<span class="sourceLineNo">964</span>      while (keepLooping()) {<a name="line.964"></a>
-<span class="sourceLineNo">965</span>        RegionServerStartupResponse w = reportForDuty();<a name="line.965"></a>
-<span class="sourceLineNo">966</span>        if (w == null) {<a name="line.966"></a>
-<span class="sourceLineNo">967</span>          long sleepTime = rc.getBackoffTimeAndIncrementAttempts();<a name="line.967"></a>
-<span class="sourceLineNo">968</span>          LOG.warn("reportForDuty failed; sleeping {} ms and then retrying.", sleepTime);<a name="line.968"></a>
-<span class="sourceLineNo">969</span>          this.sleeper.sleep(sleepTime);<a name="line.969"></a>
-<span class="sourceLineNo">970</span>        } else {<a name="line.970"></a>
-<span class="sourceLineNo">971</span>          handleReportForDutyResponse(w);<a name="line.971"></a>
-<span class="sourceLineNo">972</span>          break;<a name="line.972"></a>
-<span class="sourceLineNo">973</span>        }<a name="line.973"></a>
-<span class="sourceLineNo">974</span>      }<a name="line.974"></a>
-<span class="sourceLineNo">975</span><a name="line.975"></a>
-<span class="sourceLineNo">976</span>      if (!isStopped() &amp;&amp; isHealthy()) {<a name="line.976"></a>
-<span class="sourceLineNo">977</span>        // start the snapshot handler and other procedure handlers,<a name="line.977"></a>
-<span class="sourceLineNo">978</span>        // since the server is ready to run<a name="line.978"></a>
-<span class="sourceLineNo">979</span>        if (this.rspmHost != null) {<a name="line.979"></a>
-<span class="sourceLineNo">980</span>          this.rspmHost.start();<a name="line.980"></a>
-<span class="sourceLineNo">981</span>        }<a name="line.981"></a>
-<span class="sourceLineNo">982</span>        // Start the Quota Manager<a name="line.982"></a>
-<span class="sourceLineNo">983</span>        if (this.rsQuotaManager != null) {<a name="line.983"></a>
-<span class="sourceLineNo">984</span>          rsQuotaManager.start(getRpcServer().getScheduler());<a name="line.984"></a>
-<span class="sourceLineNo">985</span>        }<a name="line.985"></a>
-<span class="sourceLineNo">986</span>        if (this.rsSpaceQuotaManager != null) {<a name="line.986"></a>
-<span class="sourceLineNo">987</span>          this.rsSpaceQuotaManager.start();<a name="line.987"></a>
-<span class="sourceLineNo">988</span>        }<a name="line.988"></a>
-<span class="sourceLineNo">989</span>      }<a name="line.989"></a>
-<span class="sourceLineNo">990</span><a name="line.990"></a>
-<span class="sourceLineNo">991</span>      // We registered with the Master.  Go into run mode.<a name="line.991"></a>
-<span class="sourceLineNo">992</span>      long lastMsg = System.currentTimeMillis();<a name="line.992"></a>
-<span class="sourceLineNo">993</span>      long oldRequestCount = -1;<a name="line.993"></a>
-<span class="sourceLineNo">994</span>      // The main run loop.<a name="line.994"></a>
-<span class="sourceLineNo">995</span>      while (!isStopped() &amp;&amp; isHealthy()) {<a name="line.995"></a>
-<span class="sourceLineNo">996</span>        if (!isClusterUp()) {<a name="line.996"></a>
-<span class="sourceLineNo">997</span>          if (isOnlineRegionsEmpty()) {<a name="line.997"></a>
-<span class="sourceLineNo">998</span>            stop("Exiting; cluster shutdown set and not carrying any regions");<a name="line.998"></a>
-<span class="sourceLineNo">999</span>          } else if (!this.stopping) {<a name="line.999"></a>
-<span class="sourceLineNo">1000</span>            this.stopping = true;<a name="line.1000"></a>
-<span class="sourceLineNo">1001</span>            LOG.info("Closing user regions");<a name="line.1001"></a>
-<span class="sourceLineNo">1002</span>            closeUserRegions(this.abortRequested);<a name="line.1002"></a>
-<span class="sourceLineNo">1003</span>          } else if (this.stopping) {<a name="line.1003"></a>
-<span class="sourceLineNo">1004</span>            boolean allUserRegionsOffline = areAllUserRegionsOffline();<a name="line.1004"></a>
-<span class="sourceLineNo">1005</span>            if (allUserRegionsOffline) {<a name="line.1005"></a>
-<span class="sourceLineNo">1006</span>              // Set stopped if no more write requests tp meta tables<a name="line.1006"></a>
-<span class="sourceLineNo">1007</span>              // since last time we went around the loop.  Any open<a name="line.1007"></a>
-<span class="sourceLineNo">1008</span>              // meta regions will be closed on our way out.<a name="line.1008"></a>
-<span class="sourceLineNo">1009</span>              if (oldRequestCount == getWriteRequestCount()) {<a name="line.1009"></a>
-<span class="sourceLineNo">1010</span>                stop("Stopped; only catalog regions remaining online");<a name="line.1010"></a>
-<span class="sourceLineNo">1011</span>                break;<a name="line.1011"></a>
-<span class="sourceLineNo">1012</span>              }<a name="line.1012"></a>
-<span class="sourceLineNo">1013</span>              oldRequestCount = getWriteRequestCount();<a name="line.1013"></a>
-<span class="sourceLineNo">1014</span>            } else {<a name="line.1014"></a>
-<span class="sourceLineNo">1015</span>              // Make sure all regions have been closed -- some regions may<a name="line.1015"></a>
-<span class="sourceLineNo">1016</span>              // have not got it because we were splitting at the time of<a name="line.1016"></a>
-<span class="sourceLineNo">1017</span>              // the call to closeUserRegions.<a name="line.1017"></a>
-<span class="sourceLineNo">1018</span>              closeUserRegions(this.abortRequested);<a name="line.1018"></a>
-<span class="sourceLineNo">1019</span>            }<a name="line.1019"></a>
-<span class="sourceLineNo">1020</span>            LOG.debug("Waiting on " + getOnlineRegionsAsPrintableString());<a name="line.1020"></a>
-<span class="sourceLineNo">1021</span>          }<a name="line.1021"></a>
-<span class="sourceLineNo">1022</span>        }<a name="line.1022"></a>
-<span class="sourceLineNo">1023</span>        long now = System.currentTimeMillis();<a name="line.1023"></a>
-<span class="sourceLineNo">1024</span>        if ((now - lastMsg) &gt;= msgInterval) {<a name="line.1024"></a>
-<span class="sourceLineNo">1025</span>          tryRegionServerReport(lastMsg, now);<a name="line.1025"></a>
-<span class="sourceLineNo">1026</span>          lastMsg = System.currentTimeMillis();<a name="line.1026"></a>
+<span class="sourceLineNo">902</span>    waitForMasterActive();<a name="line.902"></a>
+<span class="sourceLineNo">903</span>    if (isStopped() || isAborted()) {<a name="line.903"></a>
+<span class="sourceLineNo">904</span>      return; // No need for further initialization<a name="line.904"></a>
+<span class="sourceLineNo">905</span>    }<a name="line.905"></a>
+<span class="sourceLineNo">906</span><a name="line.906"></a>
+<span class="sourceLineNo">907</span>    // watch for snapshots and other procedures<a name="line.907"></a>
+<span class="sourceLineNo">908</span>    try {<a name="line.908"></a>
+<span class="sourceLineNo">909</span>      rspmHost = new RegionServerProcedureManagerHost();<a name="line.909"></a>
+<span class="sourceLineNo">910</span>      rspmHost.loadProcedures(conf);<a name="line.910"></a>
+<span class="sourceLineNo">911</span>      rspmHost.initialize(this);<a name="line.911"></a>
+<span class="sourceLineNo">912</span>    } catch (KeeperException e) {<a name="line.912"></a>
+<span class="sourceLineNo">913</span>      this.abort("Failed to reach coordination cluster when creating procedure handler.", e);<a name="line.913"></a>
+<span class="sourceLineNo">914</span>    }<a name="line.914"></a>
+<span class="sourceLineNo">915</span>  }<a name="line.915"></a>
+<span class="sourceLineNo">916</span><a name="line.916"></a>
+<span class="sourceLineNo">917</span>  /**<a name="line.917"></a>
+<span class="sourceLineNo">918</span>   * Utilty method to wait indefinitely on a znode availability while checking<a name="line.918"></a>
+<span class="sourceLineNo">919</span>   * if the region server is shut down<a name="line.919"></a>
+<span class="sourceLineNo">920</span>   * @param tracker znode tracker to use<a name="line.920"></a>
+<span class="sourceLineNo">921</span>   * @throws IOException any IO exception, plus if the RS is stopped<a name="line.921"></a>
+<span class="sourceLineNo">922</span>   * @throws InterruptedException<a name="line.922"></a>
+<span class="sourceLineNo">923</span>   */<a name="line.923"></a>
+<span class="sourceLineNo">924</span>  private void blockAndCheckIfStopped(ZKNodeTracker tracker)<a name="line.924"></a>
+<span class="sourceLineNo">925</span>      throws IOException, InterruptedException {<a name="line.925"></a>
+<span class="sourceLineNo">926</span>    while (tracker.blockUntilAvailable(this.msgInterval, false) == null) {<a name="line.926"></a>
+<span class="sourceLineNo">927</span>      if (this.stopped) {<a name="line.927"></a>
+<span class="sourceLineNo">928</span>        throw new IOException("Received the shutdown message while waiting.");<a name="line.928"></a>
+<span class="sourceLineNo">929</span>      }<a name="line.929"></a>
+<span class="sourceLineNo">930</span>    }<a name="line.930"></a>
+<span class="sourceLineNo">931</span>  }<a name="line.931"></a>
+<span class="sourceLineNo">932</span><a name="line.932"></a>
+<span class="sourceLineNo">933</span>  /**<a name="line.933"></a>
+<span class="sourceLineNo">934</span>   * @return True if the cluster is up.<a name="line.934"></a>
+<span class="sourceLineNo">935</span>   */<a name="line.935"></a>
+<span class="sourceLineNo">936</span>  @Override<a name="line.936"></a>
+<span class="sourceLineNo">937</span>  public boolean isClusterUp() {<a name="line.937"></a>
+<span class="sourceLineNo">938</span>    return this.masterless ||<a name="line.938"></a>
+<span class="sourceLineNo">939</span>        (this.clusterStatusTracker != null &amp;&amp; this.clusterStatusTracker.isClusterUp());<a name="line.939"></a>
+<span class="sourceLineNo">940</span>  }<a name="line.940"></a>
+<span class="sourceLineNo">941</span><a name="line.941"></a>
+<span class="sourceLineNo">942</span>  /**<a name="line.942"></a>
+<span class="sourceLineNo">943</span>   * The HRegionServer sticks in this loop until closed.<a name="line.943"></a>
+<span class="sourceLineNo">944</span>   */<a name="line.944"></a>
+<span class="sourceLineNo">945</span>  @Override<a name="line.945"></a>
+<span class="sourceLineNo">946</span>  public void run() {<a name="line.946"></a>
+<span class="sourceLineNo">947</span>    try {<a name="line.947"></a>
+<span class="sourceLineNo">948</span>      // Do pre-registration initializations; zookeeper, lease threads, etc.<a name="line.948"></a>
+<span class="sourceLineNo">949</span>      preRegistrationInitialization();<a name="line.949"></a>
+<span class="sourceLineNo">950</span>    } catch (Throwable e) {<a name="line.950"></a>
+<span class="sourceLineNo">951</span>      abort("Fatal exception during initialization", e);<a name="line.951"></a>
+<span class="sourceLineNo">952</span>    }<a name="line.952"></a>
+<span class="sourceLineNo">953</span><a name="line.953"></a>
+<span class="sourceLineNo">954</span>    try {<a name="line.954"></a>
+<span class="sourceLineNo">955</span>      if (!isStopped() &amp;&amp; !isAborted()) {<a name="line.955"></a>
+<span class="sourceLineNo">956</span>        ShutdownHook.install(conf, fs, this, Thread.currentThread());<a name="line.956"></a>
+<span class="sourceLineNo">957</span>        // Initialize the RegionServerCoprocessorHost now that our ephemeral<a name="line.957"></a>
+<span class="sourceLineNo">958</span>        // node was created, in case any coprocessors want to use ZooKeeper<a name="line.958"></a>
+<span class="sourceLineNo">959</span>        this.rsHost = new RegionServerCoprocessorHost(this, this.conf);<a name="line.959"></a>
+<span class="sourceLineNo">960</span>      }<a name="line.960"></a>
+<span class="sourceLineNo">961</span><a name="line.961"></a>
+<span class="sourceLineNo">962</span>      // Try and register with the Master; tell it we are here.  Break if server is stopped or the<a name="line.962"></a>
+<span class="sourceLineNo">963</span>      // clusterup flag is down or hdfs went wacky. Once registered successfully, go ahead and start<a name="line.963"></a>
+<span class="sourceLineNo">964</span>      // up all Services. Use RetryCounter to get backoff in case Master is struggling to come up.<a name="line.964"></a>
+<span class="sourceLineNo">965</span>      LOG.debug("About to register with Master.");<a name="line.965"></a>
+<span class="sourceLineNo">966</span>      RetryCounterFactory rcf = new RetryCounterFactory(Integer.MAX_VALUE,<a name="line.966"></a>
+<span class="sourceLineNo">967</span>          this.sleeper.getPeriod(), 1000 * 60 * 5);<a name="line.967"></a>
+<span class="sourceLineNo">968</span>      RetryCounter rc = rcf.create();<a name="line.968"></a>
+<span class="sourceLineNo">969</span>      while (keepLooping()) {<a name="line.969"></a>
+<span class="sourceLineNo">970</span>        RegionServerStartupResponse w = reportForDuty();<a name="line.970"></a>
+<span class="sourceLineNo">971</span>        if (w == null) {<a name="line.971"></a>
+<span class="sourceLineNo">972</span>          long sleepTime = rc.getBackoffTimeAndIncrementAttempts();<a name="line.972"></a>
+<span class="sourceLineNo">973</span>          LOG.warn("reportForDuty failed; sleeping {} ms and then retrying.", sleepTime);<a name="line.973"></a>
+<span class="sourceLineNo">974</span>          this.sleeper.sleep(sleepTime);<a name="line.974"></a>
+<span class="sourceLineNo">975</span>        } else {<a name="line.975"></a>
+<span class="sourceLineNo">976</span>          handleReportForDutyResponse(w);<a name="line.976"></a>
+<span class="sourceLineNo">977</span>          break;<a name="line.977"></a>
+<span class="sourceLineNo">978</span>        }<a name="line.978"></a>
+<span class="sourceLineNo">979</span>      }<a name="line.979"></a>
+<span class="sourceLineNo">980</span><a name="line.980"></a>
+<span class="sourceLineNo">981</span>      if (!isStopped() &amp;&amp; isHealthy()) {<a name="line.981"></a>
+<span class="sourceLineNo">982</span>        // start the snapshot handler and other procedure handlers,<a name="line.982"></a>
+<span class="sourceLineNo">983</span>        // since the server is ready to run<a name="line.983"></a>
+<span class="sourceLineNo">984</span>        if (this.rspmHost != null) {<a name="line.984"></a>
+<span class="sourceLineNo">985</span>          this.rspmHost.start();<a name="line.985"></a>
+<span class="sourceLineNo">986</span>        }<a name="line.986"></a>
+<span class="sourceLineNo">987</span>        // Start the Quota Manager<a name="line.987"></a>
+<span class="sourceLineNo">988</span>        if (this.rsQuotaManager != null) {<a name="line.988"></a>
+<span class="sourceLineNo">989</span>          rsQuotaManager.start(getRpcServer().getScheduler());<a name="line.989"></a>
+<span class="sourceLineNo">990</span>        }<a name="line.990"></a>
+<span class="sourceLineNo">991</span>        if (this.rsSpaceQuotaManager != null) {<a name="line.991"></a>
+<span class="sourceLineNo">992</span>          this.rsSpaceQuotaManager.start();<a name="line.992"></a>
+<span class="sourceLineNo">993</span>        }<a name="line.993"></a>
+<span class="sourceLineNo">994</span>      }<a name="line.994"></a>
+<span class="sourceLineNo">995</span><a name="line.995"></a>
+<span class="sourceLineNo">996</span>      // We registered with the Master.  Go into run mode.<a name="line.996"></a>
+<span class="sourceLineNo">997</span>      long lastMsg = System.currentTimeMillis();<a name="line.997"></a>
+<span class="sourceLineNo">998</span>      long oldRequestCount = -1;<a name="line.998"></a>
+<span class="sourceLineNo">999</span>      // The main run loop.<a name="line.999"></a>
+<span class="sourceLineNo">1000</span>      while (!isStopped() &amp;&amp; isHealthy()) {<a name="line.1000"></a>
+<span class="sourceLineNo">1001</span>        if (!isClusterUp()) {<a name="line.1001"></a>
+<span class="sourceLineNo">1002</span>          if (isOnlineRegionsEmpty()) {<a name="line.1002"></a>
+<span class="sourceLineNo">1003</span>            stop("Exiting; cluster shutdown set and not carrying any regions");<a name="line.1003"></a>
+<span class="sourceLineNo">1004</span>          } else if (!this.stopping) {<a name="line.1004"></a>
+<span class="sourceLineNo">1005</span>            this.stopping = true;<a name="line.1005"></a>
+<span class="sourceLineNo">1006</span>            LOG.info("Closing user regions");<a name="line.1006"></a>
+<span class="sourceLineNo">1007</span>            closeUserRegions(this.abortRequested);<a name="line.1007"></a>
+<span class="sourceLineNo">1008</span>          } else if (this.stopping) {<a name="line.1008"></a>
+<span class="sourceLineNo">1009</span>            boolean allUserRegionsOffline = areAllUserRegionsOffline();<a name="line.1009"></a>
+<span class="sourceLineNo">1010</span>            if (allUserRegionsOffline) {<a name="line.1010"></a>
+<span class="sourceLineNo">1011</span>              // Set stopped if no more write requests tp meta tables<a name="line.1011"></a>
+<span class="sourceLineNo">1012</span>              // since last time we went around the loop.  Any open<a name="line.1012"></a>
+<span class="sourceLineNo">1013</span>              // meta regions will be closed on our way out.<a name="line.1013"></a>
+<span class="sourceLineNo">1014</span>              if (oldRequestCount == getWriteRequestCount()) {<a name="line.1014"></a>
+<span class="sourceLineNo">1015</span>                stop("Stopped; only catalog regions remaining online");<a name="line.1015"></a>
+<span class="sourceLineNo">1016</span>                break;<a name="line.1016"></a>
+<span class="sourceLineNo">1017</span>              }<a name="line.1017"></a>
+<span class="sourceLineNo">1018</span>              oldRequestCount = getWriteRequestCount();<a name="line.1018"></a>
+<span class="sourceLineNo">1019</span>            } else {<a name="line.1019"></a>
+<span class="sourceLineNo">1020</span>              // Make sure all regions have been closed -- some regions may<a name="line.1020"></a>
+<span class="sourceLineNo">1021</span>              // have not got it because we were splitting at the time of<a name="line.1021"></a>
+<span class="sourceLineNo">1022</span>              // the call to closeUserRegions.<a name="line.1022"></a>
+<span class="sourceLineNo">1023</span>              closeUserRegions(this.abortRequested);<a name="line.1023"></a>
+<span class="sourceLineNo">1024</span>            }<a name="line.1024"></a>
+<span class="sourceLineNo">1025</span>            LOG.debug("Waiting on " + getOnlineRegionsAsPrintableString());<a name="line.1025"></a>
+<span class="sourceLineNo">1026</span>          }<a name="line.1026"></a>
 <span class="sourceLineNo">1027</span>        }<a name="line.1027"></a>
-<span class="sourceLineNo">1028</span>        if (!isStopped() &amp;&amp; !isAborted()) {<a name="line.1028"></a>
-<span class="sourceLineNo">1029</span>          this.sleeper.sleep();<a name="line.1029"></a>
-<span class="sourceLineNo">1030</span>        }<a name="line.1030"></a>
-<span class="sourceLineNo">1031</span>      } // for<a name="line.1031"></a>
-<span class="sourceLineNo">1032</span>    } catch (Throwable t) {<a name="line.1032"></a>
-<span class="sourceLineNo">1033</span>      if (!rpcServices.checkOOME(t)) {<a name="line.1033"></a>
-<span class="sourceLineNo">1034</span>        String prefix = t instanceof YouAreDeadException? "": "Unhandled: ";<a name="line.1034"></a>
-<span class="sourceLineNo">1035</span>        abort(prefix + t.getMessage(), t);<a name="line.1035"></a>
-<span class="sourceLineNo">1036</span>      }<a name="line.1036"></a>
-<span class="sourceLineNo">1037</span>    }<a name="line.1037"></a>
-<span class="sourceLineNo">1038</span><a name="line.1038"></a>
-<span class="sourceLineNo">1039</span>    if (abortRequested) {<a name="line.1039"></a>
-<span class="sourceLineNo">1040</span>      Timer abortMonitor = new Timer("Abort regionserver monitor", true);<a name="line.1040"></a>
-<span class="sourceLineNo">1041</span>      TimerTask abortTimeoutTask = null;<a name="line.1041"></a>
-<span class="sourceLineNo">1042</span>      try {<a name="line.1042"></a>
-<span class="sourceLineNo">1043</span>        abortTimeoutTask =<a name="line.1043"></a>
-<span class="sourceLineNo">1044</span>            Class.forName(conf.get(ABORT_TIMEOUT_TASK, SystemExitWhenAbortTimeout.class.getName()))<a name="line.1044"></a>
-<span class="sourceLineNo">1045</span>                .asSubclass(TimerTask.class).getDeclaredConstructor().newInstance();<a name="line.1045"></a>
-<span class="sourceLineNo">1046</span>      } catch (Exception e) {<a name="line.1046"></a>
-<span class="sourceLineNo">1047</span>        LOG.warn("Initialize abort timeout task failed", e);<a name="line.1047"></a>
-<span class="sourceLineNo">1048</span>      }<a name="line.1048"></a>
-<span class="sourceLineNo">1049</span>      if (abortTimeoutTask != null) {

<TRUNCATED>
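
The run loop above decides when a stopping region server may actually exit: once all user regions are offline, it keeps looping until the write request count stops changing between passes, so in-flight meta writes drain before shutdown. Below is a minimal standalone sketch of that quiescence check; the class and method names are this sketch's own and are not part of HRegionServer.

// Minimal sketch (not HBase API): the quiescence check used by the run loop above.
final class QuiescenceCheck {
  private long oldRequestCount = -1;   // same sentinel the run loop starts with

  /** @return true when it is safe to stop: regions are offline and no new writes arrived. */
  boolean readyToStop(boolean allUserRegionsOffline, long currentWriteRequestCount) {
    if (!allUserRegionsOffline) {
      return false;                    // user regions still closing; keep waiting
    }
    if (oldRequestCount == currentWriteRequestCount) {
      return true;                     // no writes since the previous pass; only catalog regions remain
    }
    oldRequestCount = currentWriteRequestCount;
    return false;                      // remember the count and go around once more
  }
}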

[13/26] hbase-site git commit: Published site at 6f15cecaed2f1f76bfe1880b7c578ed369daa5d5.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.SystemExitWhenAbortTimeout.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.SystemExitWhenAbortTimeout.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.SystemExitWhenAbortTimeout.html
index 6369c27..ea05301 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.SystemExitWhenAbortTimeout.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.SystemExitWhenAbortTimeout.html
@@ -603,3251 +603,3256 @@
 <span class="sourceLineNo">595</span>      // init superusers and add the server principal (if using security)<a name="line.595"></a>
 <span class="sourceLineNo">596</span>      // or process owner as default super user.<a name="line.596"></a>
 <span class="sourceLineNo">597</span>      Superusers.initialize(conf);<a name="line.597"></a>
-<span class="sourceLineNo">598</span><a name="line.598"></a>
-<span class="sourceLineNo">599</span>      regionServerAccounting = new RegionServerAccounting(conf);<a name="line.599"></a>
+<span class="sourceLineNo">598</span>      regionServerAccounting = new RegionServerAccounting(conf);<a name="line.598"></a>
+<span class="sourceLineNo">599</span><a name="line.599"></a>
 <span class="sourceLineNo">600</span>      boolean isMasterNotCarryTable =<a name="line.600"></a>
 <span class="sourceLineNo">601</span>          this instanceof HMaster &amp;&amp; !LoadBalancer.isTablesOnMaster(conf);<a name="line.601"></a>
-<span class="sourceLineNo">602</span>      cacheConfig = new CacheConfig(conf, !isMasterNotCarryTable);<a name="line.602"></a>
-<span class="sourceLineNo">603</span>      mobCacheConfig = new MobCacheConfig(conf, !isMasterNotCarryTable);<a name="line.603"></a>
-<span class="sourceLineNo">604</span>      uncaughtExceptionHandler = new UncaughtExceptionHandler() {<a name="line.604"></a>
-<span class="sourceLineNo">605</span>        @Override<a name="line.605"></a>
-<span class="sourceLineNo">606</span>        public void uncaughtException(Thread t, Throwable e) {<a name="line.606"></a>
-<span class="sourceLineNo">607</span>          abort("Uncaught exception in executorService thread " + t.getName(), e);<a name="line.607"></a>
-<span class="sourceLineNo">608</span>        }<a name="line.608"></a>
-<span class="sourceLineNo">609</span>      };<a name="line.609"></a>
-<span class="sourceLineNo">610</span><a name="line.610"></a>
-<span class="sourceLineNo">611</span>      initializeFileSystem();<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      spanReceiverHost = SpanReceiverHost.getInstance(getConfiguration());<a name="line.612"></a>
-<span class="sourceLineNo">613</span><a name="line.613"></a>
-<span class="sourceLineNo">614</span>      this.configurationManager = new ConfigurationManager();<a name="line.614"></a>
-<span class="sourceLineNo">615</span>      setupWindows(getConfiguration(), getConfigurationManager());<a name="line.615"></a>
-<span class="sourceLineNo">616</span><a name="line.616"></a>
-<span class="sourceLineNo">617</span>      // Some unit tests don't need a cluster, so no zookeeper at all<a name="line.617"></a>
-<span class="sourceLineNo">618</span>      if (!conf.getBoolean("hbase.testing.nocluster", false)) {<a name="line.618"></a>
-<span class="sourceLineNo">619</span>        // Open connection to zookeeper and set primary watcher<a name="line.619"></a>
-<span class="sourceLineNo">620</span>        zooKeeper = new ZKWatcher(conf, getProcessName() + ":" +<a name="line.620"></a>
-<span class="sourceLineNo">621</span>          rpcServices.isa.getPort(), this, canCreateBaseZNode());<a name="line.621"></a>
-<span class="sourceLineNo">622</span>        // If no master in cluster, skip trying to track one or look for a cluster status.<a name="line.622"></a>
-<span class="sourceLineNo">623</span>        if (!this.masterless) {<a name="line.623"></a>
-<span class="sourceLineNo">624</span>          this.csm = new ZkCoordinatedStateManager(this);<a name="line.624"></a>
-<span class="sourceLineNo">625</span><a name="line.625"></a>
-<span class="sourceLineNo">626</span>          masterAddressTracker = new MasterAddressTracker(getZooKeeper(), this);<a name="line.626"></a>
-<span class="sourceLineNo">627</span>          masterAddressTracker.start();<a name="line.627"></a>
-<span class="sourceLineNo">628</span><a name="line.628"></a>
-<span class="sourceLineNo">629</span>          clusterStatusTracker = new ClusterStatusTracker(zooKeeper, this);<a name="line.629"></a>
-<span class="sourceLineNo">630</span>          clusterStatusTracker.start();<a name="line.630"></a>
-<span class="sourceLineNo">631</span>        } else {<a name="line.631"></a>
-<span class="sourceLineNo">632</span>          masterAddressTracker = null;<a name="line.632"></a>
-<span class="sourceLineNo">633</span>          clusterStatusTracker = null;<a name="line.633"></a>
-<span class="sourceLineNo">634</span>        }<a name="line.634"></a>
-<span class="sourceLineNo">635</span>      } else {<a name="line.635"></a>
-<span class="sourceLineNo">636</span>        zooKeeper = null;<a name="line.636"></a>
-<span class="sourceLineNo">637</span>        masterAddressTracker = null;<a name="line.637"></a>
-<span class="sourceLineNo">638</span>        clusterStatusTracker = null;<a name="line.638"></a>
-<span class="sourceLineNo">639</span>      }<a name="line.639"></a>
-<span class="sourceLineNo">640</span>      this.rpcServices.start(zooKeeper);<a name="line.640"></a>
-<span class="sourceLineNo">641</span>      // This violates 'no starting stuff in Constructor' but Master depends on the below chore<a name="line.641"></a>
-<span class="sourceLineNo">642</span>      // and executor being created and takes a different startup route. Lots of overlap between HRS<a name="line.642"></a>
-<span class="sourceLineNo">643</span>      // and M (An M IS A HRS now). Need to refactor so less duplication between M and its super<a name="line.643"></a>
-<span class="sourceLineNo">644</span>      // Master expects Constructor to put up web servers. Ugh.<a name="line.644"></a>
-<span class="sourceLineNo">645</span>      // class HRS. TODO.<a name="line.645"></a>
-<span class="sourceLineNo">646</span>      this.choreService = new ChoreService(getName(), true);<a name="line.646"></a>
-<span class="sourceLineNo">647</span>      this.executorService = new ExecutorService(getName());<a name="line.647"></a>
-<span class="sourceLineNo">648</span>      putUpWebUI();<a name="line.648"></a>
-<span class="sourceLineNo">649</span>    } catch (Throwable t) {<a name="line.649"></a>
-<span class="sourceLineNo">650</span>      // Make sure we log the exception. HRegionServer is often started via reflection and the<a name="line.650"></a>
-<span class="sourceLineNo">651</span>      // cause of failed startup is lost.<a name="line.651"></a>
-<span class="sourceLineNo">652</span>      LOG.error("Failed construction RegionServer", t);<a name="line.652"></a>
-<span class="sourceLineNo">653</span>      throw t;<a name="line.653"></a>
-<span class="sourceLineNo">654</span>    }<a name="line.654"></a>
-<span class="sourceLineNo">655</span>  }<a name="line.655"></a>
-<span class="sourceLineNo">656</span><a name="line.656"></a>
-<span class="sourceLineNo">657</span>  // HMaster should override this method to load the specific config for master<a name="line.657"></a>
-<span class="sourceLineNo">658</span>  protected String getUseThisHostnameInstead(Configuration conf) throws IOException {<a name="line.658"></a>
-<span class="sourceLineNo">659</span>    String hostname = conf.get(RS_HOSTNAME_KEY);<a name="line.659"></a>
-<span class="sourceLineNo">660</span>    if (conf.getBoolean(RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY, false)) {<a name="line.660"></a>
-<span class="sourceLineNo">661</span>      if (!StringUtils.isBlank(hostname)) {<a name="line.661"></a>
-<span class="sourceLineNo">662</span>        String msg = RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY + " and " + RS_HOSTNAME_KEY +<a name="line.662"></a>
-<span class="sourceLineNo">663</span>          " are mutually exclusive. Do not set " + RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY +<a name="line.663"></a>
-<span class="sourceLineNo">664</span>          " to true while " + RS_HOSTNAME_KEY + " is used";<a name="line.664"></a>
-<span class="sourceLineNo">665</span>        throw new IOException(msg);<a name="line.665"></a>
-<span class="sourceLineNo">666</span>      } else {<a name="line.666"></a>
-<span class="sourceLineNo">667</span>        return rpcServices.isa.getHostName();<a name="line.667"></a>
-<span class="sourceLineNo">668</span>      }<a name="line.668"></a>
-<span class="sourceLineNo">669</span>    } else {<a name="line.669"></a>
-<span class="sourceLineNo">670</span>      return hostname;<a name="line.670"></a>
-<span class="sourceLineNo">671</span>    }<a name="line.671"></a>
-<span class="sourceLineNo">672</span>  }<a name="line.672"></a>
-<span class="sourceLineNo">673</span><a name="line.673"></a>
-<span class="sourceLineNo">674</span>  /**<a name="line.674"></a>
-<span class="sourceLineNo">675</span>   * If running on Windows, do windows-specific setup.<a name="line.675"></a>
-<span class="sourceLineNo">676</span>   */<a name="line.676"></a>
-<span class="sourceLineNo">677</span>  private static void setupWindows(final Configuration conf, ConfigurationManager cm) {<a name="line.677"></a>
-<span class="sourceLineNo">678</span>    if (!SystemUtils.IS_OS_WINDOWS) {<a name="line.678"></a>
-<span class="sourceLineNo">679</span>      Signal.handle(new Signal("HUP"), new SignalHandler() {<a name="line.679"></a>
-<span class="sourceLineNo">680</span>        @Override<a name="line.680"></a>
-<span class="sourceLineNo">681</span>        public void handle(Signal signal) {<a name="line.681"></a>
-<span class="sourceLineNo">682</span>          conf.reloadConfiguration();<a name="line.682"></a>
-<span class="sourceLineNo">683</span>          cm.notifyAllObservers(conf);<a name="line.683"></a>
-<span class="sourceLineNo">684</span>        }<a name="line.684"></a>
-<span class="sourceLineNo">685</span>      });<a name="line.685"></a>
-<span class="sourceLineNo">686</span>    }<a name="line.686"></a>
-<span class="sourceLineNo">687</span>  }<a name="line.687"></a>
-<span class="sourceLineNo">688</span><a name="line.688"></a>
-<span class="sourceLineNo">689</span>  private static NettyEventLoopGroupConfig setupNetty(Configuration conf) {<a name="line.689"></a>
-<span class="sourceLineNo">690</span>    // Initialize netty event loop group at start as we may use it for rpc server, rpc client &amp; WAL.<a name="line.690"></a>
-<span class="sourceLineNo">691</span>    NettyEventLoopGroupConfig nelgc =<a name="line.691"></a>
-<span class="sourceLineNo">692</span>      new NettyEventLoopGroupConfig(conf, "RS-EventLoopGroup");<a name="line.692"></a>
-<span class="sourceLineNo">693</span>    NettyRpcClientConfigHelper.setEventLoopConfig(conf, nelgc.group(), nelgc.clientChannelClass());<a name="line.693"></a>
-<span class="sourceLineNo">694</span>    NettyAsyncFSWALConfigHelper.setEventLoopConfig(conf, nelgc.group(), nelgc.clientChannelClass());<a name="line.694"></a>
-<span class="sourceLineNo">695</span>    return nelgc;<a name="line.695"></a>
-<span class="sourceLineNo">696</span>  }<a name="line.696"></a>
-<span class="sourceLineNo">697</span><a name="line.697"></a>
-<span class="sourceLineNo">698</span>  private void initializeFileSystem() throws IOException {<a name="line.698"></a>
-<span class="sourceLineNo">699</span>    // Get fs instance used by this RS.  Do we use checksum verification in the hbase? If hbase<a name="line.699"></a>
-<span class="sourceLineNo">700</span>    // checksum verification enabled, then automatically switch off hdfs checksum verification.<a name="line.700"></a>
-<span class="sourceLineNo">701</span>    boolean useHBaseChecksum = conf.getBoolean(HConstants.HBASE_CHECKSUM_VERIFICATION, true);<a name="line.701"></a>
-<span class="sourceLineNo">702</span>    FSUtils.setFsDefault(this.conf, FSUtils.getWALRootDir(this.conf));<a name="line.702"></a>
-<span class="sourceLineNo">703</span>    this.walFs = new HFileSystem(this.conf, useHBaseChecksum);<a name="line.703"></a>
-<span class="sourceLineNo">704</span>    this.walRootDir = FSUtils.getWALRootDir(this.conf);<a name="line.704"></a>
-<span class="sourceLineNo">705</span>    // Set 'fs.defaultFS' to match the filesystem on hbase.rootdir else<a name="line.705"></a>
-<span class="sourceLineNo">706</span>    // underlying hadoop hdfs accessors will be going against wrong filesystem<a name="line.706"></a>
-<span class="sourceLineNo">707</span>    // (unless all is set to defaults).<a name="line.707"></a>
-<span class="sourceLineNo">708</span>    FSUtils.setFsDefault(this.conf, FSUtils.getRootDir(this.conf));<a name="line.708"></a>
-<span class="sourceLineNo">709</span>    this.fs = new HFileSystem(this.conf, useHBaseChecksum);<a name="line.709"></a>
-<span class="sourceLineNo">710</span>    this.rootDir = FSUtils.getRootDir(this.conf);<a name="line.710"></a>
-<span class="sourceLineNo">711</span>    this.tableDescriptors = getFsTableDescriptors();<a name="line.711"></a>
-<span class="sourceLineNo">712</span>  }<a name="line.712"></a>
-<span class="sourceLineNo">713</span><a name="line.713"></a>
-<span class="sourceLineNo">714</span>  protected TableDescriptors getFsTableDescriptors() throws IOException {<a name="line.714"></a>
-<span class="sourceLineNo">715</span>    return new FSTableDescriptors(this.conf,<a name="line.715"></a>
-<span class="sourceLineNo">716</span>      this.fs, this.rootDir, !canUpdateTableDescriptor(), false, getMetaTableObserver());<a name="line.716"></a>
+<span class="sourceLineNo">602</span>      // no need to instantiate global block cache when master not carry table<a name="line.602"></a>
+<span class="sourceLineNo">603</span>      if (!isMasterNotCarryTable) {<a name="line.603"></a>
+<span class="sourceLineNo">604</span>        CacheConfig.instantiateBlockCache(conf);<a name="line.604"></a>
+<span class="sourceLineNo">605</span>      }<a name="line.605"></a>
+<span class="sourceLineNo">606</span>      cacheConfig = new CacheConfig(conf);<a name="line.606"></a>
+<span class="sourceLineNo">607</span>      mobCacheConfig = new MobCacheConfig(conf);<a name="line.607"></a>
+<span class="sourceLineNo">608</span><a name="line.608"></a>
+<span class="sourceLineNo">609</span>      uncaughtExceptionHandler = new UncaughtExceptionHandler() {<a name="line.609"></a>
+<span class="sourceLineNo">610</span>        @Override<a name="line.610"></a>
+<span class="sourceLineNo">611</span>        public void uncaughtException(Thread t, Throwable e) {<a name="line.611"></a>
+<span class="sourceLineNo">612</span>          abort("Uncaught exception in executorService thread " + t.getName(), e);<a name="line.612"></a>
+<span class="sourceLineNo">613</span>        }<a name="line.613"></a>
+<span class="sourceLineNo">614</span>      };<a name="line.614"></a>
+<span class="sourceLineNo">615</span><a name="line.615"></a>
+<span class="sourceLineNo">616</span>      initializeFileSystem();<a name="line.616"></a>
+<span class="sourceLineNo">617</span>      spanReceiverHost = SpanReceiverHost.getInstance(getConfiguration());<a name="line.617"></a>
+<span class="sourceLineNo">618</span><a name="line.618"></a>
+<span class="sourceLineNo">619</span>      this.configurationManager = new ConfigurationManager();<a name="line.619"></a>
+<span class="sourceLineNo">620</span>      setupWindows(getConfiguration(), getConfigurationManager());<a name="line.620"></a>
+<span class="sourceLineNo">621</span><a name="line.621"></a>
+<span class="sourceLineNo">622</span>      // Some unit tests don't need a cluster, so no zookeeper at all<a name="line.622"></a>
+<span class="sourceLineNo">623</span>      if (!conf.getBoolean("hbase.testing.nocluster", false)) {<a name="line.623"></a>
+<span class="sourceLineNo">624</span>        // Open connection to zookeeper and set primary watcher<a name="line.624"></a>
+<span class="sourceLineNo">625</span>        zooKeeper = new ZKWatcher(conf, getProcessName() + ":" +<a name="line.625"></a>
+<span class="sourceLineNo">626</span>          rpcServices.isa.getPort(), this, canCreateBaseZNode());<a name="line.626"></a>
+<span class="sourceLineNo">627</span>        // If no master in cluster, skip trying to track one or look for a cluster status.<a name="line.627"></a>
+<span class="sourceLineNo">628</span>        if (!this.masterless) {<a name="line.628"></a>
+<span class="sourceLineNo">629</span>          this.csm = new ZkCoordinatedStateManager(this);<a name="line.629"></a>
+<span class="sourceLineNo">630</span><a name="line.630"></a>
+<span class="sourceLineNo">631</span>          masterAddressTracker = new MasterAddressTracker(getZooKeeper(), this);<a name="line.631"></a>
+<span class="sourceLineNo">632</span>          masterAddressTracker.start();<a name="line.632"></a>
+<span class="sourceLineNo">633</span><a name="line.633"></a>
+<span class="sourceLineNo">634</span>          clusterStatusTracker = new ClusterStatusTracker(zooKeeper, this);<a name="line.634"></a>
+<span class="sourceLineNo">635</span>          clusterStatusTracker.start();<a name="line.635"></a>
+<span class="sourceLineNo">636</span>        } else {<a name="line.636"></a>
+<span class="sourceLineNo">637</span>          masterAddressTracker = null;<a name="line.637"></a>
+<span class="sourceLineNo">638</span>          clusterStatusTracker = null;<a name="line.638"></a>
+<span class="sourceLineNo">639</span>        }<a name="line.639"></a>
+<span class="sourceLineNo">640</span>      } else {<a name="line.640"></a>
+<span class="sourceLineNo">641</span>        zooKeeper = null;<a name="line.641"></a>
+<span class="sourceLineNo">642</span>        masterAddressTracker = null;<a name="line.642"></a>
+<span class="sourceLineNo">643</span>        clusterStatusTracker = null;<a name="line.643"></a>
+<span class="sourceLineNo">644</span>      }<a name="line.644"></a>
+<span class="sourceLineNo">645</span>      this.rpcServices.start(zooKeeper);<a name="line.645"></a>
+<span class="sourceLineNo">646</span>      // This violates 'no starting stuff in Constructor' but Master depends on the below chore<a name="line.646"></a>
+<span class="sourceLineNo">647</span>      // and executor being created and takes a different startup route. Lots of overlap between HRS<a name="line.647"></a>
+<span class="sourceLineNo">648</span>      // and M (An M IS A HRS now). Need to refactor so less duplication between M and its super<a name="line.648"></a>
+<span class="sourceLineNo">649</span>      // Master expects Constructor to put up web servers. Ugh.<a name="line.649"></a>
+<span class="sourceLineNo">650</span>      // class HRS. TODO.<a name="line.650"></a>
+<span class="sourceLineNo">651</span>      this.choreService = new ChoreService(getName(), true);<a name="line.651"></a>
+<span class="sourceLineNo">652</span>      this.executorService = new ExecutorService(getName());<a name="line.652"></a>
+<span class="sourceLineNo">653</span>      putUpWebUI();<a name="line.653"></a>
+<span class="sourceLineNo">654</span>    } catch (Throwable t) {<a name="line.654"></a>
+<span class="sourceLineNo">655</span>      // Make sure we log the exception. HRegionServer is often started via reflection and the<a name="line.655"></a>
+<span class="sourceLineNo">656</span>      // cause of failed startup is lost.<a name="line.656"></a>
+<span class="sourceLineNo">657</span>      LOG.error("Failed construction RegionServer", t);<a name="line.657"></a>
+<span class="sourceLineNo">658</span>      throw t;<a name="line.658"></a>
+<span class="sourceLineNo">659</span>    }<a name="line.659"></a>
+<span class="sourceLineNo">660</span>  }<a name="line.660"></a>
+<span class="sourceLineNo">661</span><a name="line.661"></a>
+<span class="sourceLineNo">662</span>  // HMaster should override this method to load the specific config for master<a name="line.662"></a>
+<span class="sourceLineNo">663</span>  protected String getUseThisHostnameInstead(Configuration conf) throws IOException {<a name="line.663"></a>
+<span class="sourceLineNo">664</span>    String hostname = conf.get(RS_HOSTNAME_KEY);<a name="line.664"></a>
+<span class="sourceLineNo">665</span>    if (conf.getBoolean(RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY, false)) {<a name="line.665"></a>
+<span class="sourceLineNo">666</span>      if (!StringUtils.isBlank(hostname)) {<a name="line.666"></a>
+<span class="sourceLineNo">667</span>        String msg = RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY + " and " + RS_HOSTNAME_KEY +<a name="line.667"></a>
+<span class="sourceLineNo">668</span>          " are mutually exclusive. Do not set " + RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY +<a name="line.668"></a>
+<span class="sourceLineNo">669</span>          " to true while " + RS_HOSTNAME_KEY + " is used";<a name="line.669"></a>
+<span class="sourceLineNo">670</span>        throw new IOException(msg);<a name="line.670"></a>
+<span class="sourceLineNo">671</span>      } else {<a name="line.671"></a>
+<span class="sourceLineNo">672</span>        return rpcServices.isa.getHostName();<a name="line.672"></a>
+<span class="sourceLineNo">673</span>      }<a name="line.673"></a>
+<span class="sourceLineNo">674</span>    } else {<a name="line.674"></a>
+<span class="sourceLineNo">675</span>      return hostname;<a name="line.675"></a>
+<span class="sourceLineNo">676</span>    }<a name="line.676"></a>
+<span class="sourceLineNo">677</span>  }<a name="line.677"></a>
+<span class="sourceLineNo">678</span><a name="line.678"></a>
+<span class="sourceLineNo">679</span>  /**<a name="line.679"></a>
+<span class="sourceLineNo">680</span>   * If running on Windows, do windows-specific setup.<a name="line.680"></a>
+<span class="sourceLineNo">681</span>   */<a name="line.681"></a>
+<span class="sourceLineNo">682</span>  private static void setupWindows(final Configuration conf, ConfigurationManager cm) {<a name="line.682"></a>
+<span class="sourceLineNo">683</span>    if (!SystemUtils.IS_OS_WINDOWS) {<a name="line.683"></a>
+<span class="sourceLineNo">684</span>      Signal.handle(new Signal("HUP"), new SignalHandler() {<a name="line.684"></a>
+<span class="sourceLineNo">685</span>        @Override<a name="line.685"></a>
+<span class="sourceLineNo">686</span>        public void handle(Signal signal) {<a name="line.686"></a>
+<span class="sourceLineNo">687</span>          conf.reloadConfiguration();<a name="line.687"></a>
+<span class="sourceLineNo">688</span>          cm.notifyAllObservers(conf);<a name="line.688"></a>
+<span class="sourceLineNo">689</span>        }<a name="line.689"></a>
+<span class="sourceLineNo">690</span>      });<a name="line.690"></a>
+<span class="sourceLineNo">691</span>    }<a name="line.691"></a>
+<span class="sourceLineNo">692</span>  }<a name="line.692"></a>
+<span class="sourceLineNo">693</span><a name="line.693"></a>
+<span class="sourceLineNo">694</span>  private static NettyEventLoopGroupConfig setupNetty(Configuration conf) {<a name="line.694"></a>
+<span class="sourceLineNo">695</span>    // Initialize netty event loop group at start as we may use it for rpc server, rpc client &amp; WAL.<a name="line.695"></a>
+<span class="sourceLineNo">696</span>    NettyEventLoopGroupConfig nelgc =<a name="line.696"></a>
+<span class="sourceLineNo">697</span>      new NettyEventLoopGroupConfig(conf, "RS-EventLoopGroup");<a name="line.697"></a>
+<span class="sourceLineNo">698</span>    NettyRpcClientConfigHelper.setEventLoopConfig(conf, nelgc.group(), nelgc.clientChannelClass());<a name="line.698"></a>
+<span class="sourceLineNo">699</span>    NettyAsyncFSWALConfigHelper.setEventLoopConfig(conf, nelgc.group(), nelgc.clientChannelClass());<a name="line.699"></a>
+<span class="sourceLineNo">700</span>    return nelgc;<a name="line.700"></a>
+<span class="sourceLineNo">701</span>  }<a name="line.701"></a>
+<span class="sourceLineNo">702</span><a name="line.702"></a>
+<span class="sourceLineNo">703</span>  private void initializeFileSystem() throws IOException {<a name="line.703"></a>
+<span class="sourceLineNo">704</span>    // Get fs instance used by this RS.  Do we use checksum verification in the hbase? If hbase<a name="line.704"></a>
+<span class="sourceLineNo">705</span>    // checksum verification enabled, then automatically switch off hdfs checksum verification.<a name="line.705"></a>
+<span class="sourceLineNo">706</span>    boolean useHBaseChecksum = conf.getBoolean(HConstants.HBASE_CHECKSUM_VERIFICATION, true);<a name="line.706"></a>
+<span class="sourceLineNo">707</span>    FSUtils.setFsDefault(this.conf, FSUtils.getWALRootDir(this.conf));<a name="line.707"></a>
+<span class="sourceLineNo">708</span>    this.walFs = new HFileSystem(this.conf, useHBaseChecksum);<a name="line.708"></a>
+<span class="sourceLineNo">709</span>    this.walRootDir = FSUtils.getWALRootDir(this.conf);<a name="line.709"></a>
+<span class="sourceLineNo">710</span>    // Set 'fs.defaultFS' to match the filesystem on hbase.rootdir else<a name="line.710"></a>
+<span class="sourceLineNo">711</span>    // underlying hadoop hdfs accessors will be going against wrong filesystem<a name="line.711"></a>
+<span class="sourceLineNo">712</span>    // (unless all is set to defaults).<a name="line.712"></a>
+<span class="sourceLineNo">713</span>    FSUtils.setFsDefault(this.conf, FSUtils.getRootDir(this.conf));<a name="line.713"></a>
+<span class="sourceLineNo">714</span>    this.fs = new HFileSystem(this.conf, useHBaseChecksum);<a name="line.714"></a>
+<span class="sourceLineNo">715</span>    this.rootDir = FSUtils.getRootDir(this.conf);<a name="line.715"></a>
+<span class="sourceLineNo">716</span>    this.tableDescriptors = getFsTableDescriptors();<a name="line.716"></a>
 <span class="sourceLineNo">717</span>  }<a name="line.717"></a>
 <span class="sourceLineNo">718</span><a name="line.718"></a>
-<span class="sourceLineNo">719</span>  protected Function&lt;TableDescriptorBuilder, TableDescriptorBuilder&gt; getMetaTableObserver() {<a name="line.719"></a>
-<span class="sourceLineNo">720</span>    return null;<a name="line.720"></a>
-<span class="sourceLineNo">721</span>  }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>  protected void login(UserProvider user, String host) throws IOException {<a name="line.723"></a>
-<span class="sourceLineNo">724</span>    user.login("hbase.regionserver.keytab.file",<a name="line.724"></a>
-<span class="sourceLineNo">725</span>      "hbase.regionserver.kerberos.principal", host);<a name="line.725"></a>
+<span class="sourceLineNo">719</span>  protected TableDescriptors getFsTableDescriptors() throws IOException {<a name="line.719"></a>
+<span class="sourceLineNo">720</span>    return new FSTableDescriptors(this.conf,<a name="line.720"></a>
+<span class="sourceLineNo">721</span>      this.fs, this.rootDir, !canUpdateTableDescriptor(), false, getMetaTableObserver());<a name="line.721"></a>
+<span class="sourceLineNo">722</span>  }<a name="line.722"></a>
+<span class="sourceLineNo">723</span><a name="line.723"></a>
+<span class="sourceLineNo">724</span>  protected Function&lt;TableDescriptorBuilder, TableDescriptorBuilder&gt; getMetaTableObserver() {<a name="line.724"></a>
+<span class="sourceLineNo">725</span>    return null;<a name="line.725"></a>
 <span class="sourceLineNo">726</span>  }<a name="line.726"></a>
 <span class="sourceLineNo">727</span><a name="line.727"></a>
-<span class="sourceLineNo">728</span><a name="line.728"></a>
-<span class="sourceLineNo">729</span>  /**<a name="line.729"></a>
-<span class="sourceLineNo">730</span>   * Wait for an active Master.<a name="line.730"></a>
-<span class="sourceLineNo">731</span>   * See override in Master superclass for how it is used.<a name="line.731"></a>
-<span class="sourceLineNo">732</span>   */<a name="line.732"></a>
-<span class="sourceLineNo">733</span>  protected void waitForMasterActive() {}<a name="line.733"></a>
-<span class="sourceLineNo">734</span><a name="line.734"></a>
-<span class="sourceLineNo">735</span>  protected String getProcessName() {<a name="line.735"></a>
-<span class="sourceLineNo">736</span>    return REGIONSERVER;<a name="line.736"></a>
-<span class="sourceLineNo">737</span>  }<a name="line.737"></a>
-<span class="sourceLineNo">738</span><a name="line.738"></a>
-<span class="sourceLineNo">739</span>  protected boolean canCreateBaseZNode() {<a name="line.739"></a>
-<span class="sourceLineNo">740</span>    return this.masterless;<a name="line.740"></a>
-<span class="sourceLineNo">741</span>  }<a name="line.741"></a>
-<span class="sourceLineNo">742</span><a name="line.742"></a>
-<span class="sourceLineNo">743</span>  protected boolean canUpdateTableDescriptor() {<a name="line.743"></a>
-<span class="sourceLineNo">744</span>    return false;<a name="line.744"></a>
-<span class="sourceLineNo">745</span>  }<a name="line.745"></a>
-<span class="sourceLineNo">746</span><a name="line.746"></a>
-<span class="sourceLineNo">747</span>  protected RSRpcServices createRpcServices() throws IOException {<a name="line.747"></a>
-<span class="sourceLineNo">748</span>    return new RSRpcServices(this);<a name="line.748"></a>
-<span class="sourceLineNo">749</span>  }<a name="line.749"></a>
-<span class="sourceLineNo">750</span><a name="line.750"></a>
-<span class="sourceLineNo">751</span>  protected void configureInfoServer() {<a name="line.751"></a>
-<span class="sourceLineNo">752</span>    infoServer.addServlet("rs-status", "/rs-status", RSStatusServlet.class);<a name="line.752"></a>
-<span class="sourceLineNo">753</span>    infoServer.setAttribute(REGIONSERVER, this);<a name="line.753"></a>
+<span class="sourceLineNo">728</span>  protected void login(UserProvider user, String host) throws IOException {<a name="line.728"></a>
+<span class="sourceLineNo">729</span>    user.login("hbase.regionserver.keytab.file",<a name="line.729"></a>
+<span class="sourceLineNo">730</span>      "hbase.regionserver.kerberos.principal", host);<a name="line.730"></a>
+<span class="sourceLineNo">731</span>  }<a name="line.731"></a>
+<span class="sourceLineNo">732</span><a name="line.732"></a>
+<span class="sourceLineNo">733</span><a name="line.733"></a>
+<span class="sourceLineNo">734</span>  /**<a name="line.734"></a>
+<span class="sourceLineNo">735</span>   * Wait for an active Master.<a name="line.735"></a>
+<span class="sourceLineNo">736</span>   * See override in Master superclass for how it is used.<a name="line.736"></a>
+<span class="sourceLineNo">737</span>   */<a name="line.737"></a>
+<span class="sourceLineNo">738</span>  protected void waitForMasterActive() {}<a name="line.738"></a>
+<span class="sourceLineNo">739</span><a name="line.739"></a>
+<span class="sourceLineNo">740</span>  protected String getProcessName() {<a name="line.740"></a>
+<span class="sourceLineNo">741</span>    return REGIONSERVER;<a name="line.741"></a>
+<span class="sourceLineNo">742</span>  }<a name="line.742"></a>
+<span class="sourceLineNo">743</span><a name="line.743"></a>
+<span class="sourceLineNo">744</span>  protected boolean canCreateBaseZNode() {<a name="line.744"></a>
+<span class="sourceLineNo">745</span>    return this.masterless;<a name="line.745"></a>
+<span class="sourceLineNo">746</span>  }<a name="line.746"></a>
+<span class="sourceLineNo">747</span><a name="line.747"></a>
+<span class="sourceLineNo">748</span>  protected boolean canUpdateTableDescriptor() {<a name="line.748"></a>
+<span class="sourceLineNo">749</span>    return false;<a name="line.749"></a>
+<span class="sourceLineNo">750</span>  }<a name="line.750"></a>
+<span class="sourceLineNo">751</span><a name="line.751"></a>
+<span class="sourceLineNo">752</span>  protected RSRpcServices createRpcServices() throws IOException {<a name="line.752"></a>
+<span class="sourceLineNo">753</span>    return new RSRpcServices(this);<a name="line.753"></a>
 <span class="sourceLineNo">754</span>  }<a name="line.754"></a>
 <span class="sourceLineNo">755</span><a name="line.755"></a>
-<span class="sourceLineNo">756</span>  protected Class&lt;? extends HttpServlet&gt; getDumpServlet() {<a name="line.756"></a>
-<span class="sourceLineNo">757</span>    return RSDumpServlet.class;<a name="line.757"></a>
-<span class="sourceLineNo">758</span>  }<a name="line.758"></a>
-<span class="sourceLineNo">759</span><a name="line.759"></a>
-<span class="sourceLineNo">760</span>  @Override<a name="line.760"></a>
-<span class="sourceLineNo">761</span>  public boolean registerService(com.google.protobuf.Service instance) {<a name="line.761"></a>
-<span class="sourceLineNo">762</span>    /*<a name="line.762"></a>
-<span class="sourceLineNo">763</span>     * No stacking of instances is allowed for a single executorService name<a name="line.763"></a>
-<span class="sourceLineNo">764</span>     */<a name="line.764"></a>
-<span class="sourceLineNo">765</span>    com.google.protobuf.Descriptors.ServiceDescriptor serviceDesc =<a name="line.765"></a>
-<span class="sourceLineNo">766</span>        instance.getDescriptorForType();<a name="line.766"></a>
-<span class="sourceLineNo">767</span>    String serviceName = CoprocessorRpcUtils.getServiceName(serviceDesc);<a name="line.767"></a>
-<span class="sourceLineNo">768</span>    if (coprocessorServiceHandlers.containsKey(serviceName)) {<a name="line.768"></a>
-<span class="sourceLineNo">769</span>      LOG.error("Coprocessor executorService " + serviceName<a name="line.769"></a>
-<span class="sourceLineNo">770</span>          + " already registered, rejecting request from " + instance);<a name="line.770"></a>
-<span class="sourceLineNo">771</span>      return false;<a name="line.771"></a>
-<span class="sourceLineNo">772</span>    }<a name="line.772"></a>
-<span class="sourceLineNo">773</span><a name="line.773"></a>
-<span class="sourceLineNo">774</span>    coprocessorServiceHandlers.put(serviceName, instance);<a name="line.774"></a>
-<span class="sourceLineNo">775</span>    if (LOG.isDebugEnabled()) {<a name="line.775"></a>
-<span class="sourceLineNo">776</span>      LOG.debug("Registered regionserver coprocessor executorService: executorService=" + serviceName);<a name="line.776"></a>
+<span class="sourceLineNo">756</span>  protected void configureInfoServer() {<a name="line.756"></a>
+<span class="sourceLineNo">757</span>    infoServer.addServlet("rs-status", "/rs-status", RSStatusServlet.class);<a name="line.757"></a>
+<span class="sourceLineNo">758</span>    infoServer.setAttribute(REGIONSERVER, this);<a name="line.758"></a>
+<span class="sourceLineNo">759</span>  }<a name="line.759"></a>
+<span class="sourceLineNo">760</span><a name="line.760"></a>
+<span class="sourceLineNo">761</span>  protected Class&lt;? extends HttpServlet&gt; getDumpServlet() {<a name="line.761"></a>
+<span class="sourceLineNo">762</span>    return RSDumpServlet.class;<a name="line.762"></a>
+<span class="sourceLineNo">763</span>  }<a name="line.763"></a>
+<span class="sourceLineNo">764</span><a name="line.764"></a>
+<span class="sourceLineNo">765</span>  @Override<a name="line.765"></a>
+<span class="sourceLineNo">766</span>  public boolean registerService(com.google.protobuf.Service instance) {<a name="line.766"></a>
+<span class="sourceLineNo">767</span>    /*<a name="line.767"></a>
+<span class="sourceLineNo">768</span>     * No stacking of instances is allowed for a single executorService name<a name="line.768"></a>
+<span class="sourceLineNo">769</span>     */<a name="line.769"></a>
+<span class="sourceLineNo">770</span>    com.google.protobuf.Descriptors.ServiceDescriptor serviceDesc =<a name="line.770"></a>
+<span class="sourceLineNo">771</span>        instance.getDescriptorForType();<a name="line.771"></a>
+<span class="sourceLineNo">772</span>    String serviceName = CoprocessorRpcUtils.getServiceName(serviceDesc);<a name="line.772"></a>
+<span class="sourceLineNo">773</span>    if (coprocessorServiceHandlers.containsKey(serviceName)) {<a name="line.773"></a>
+<span class="sourceLineNo">774</span>      LOG.error("Coprocessor executorService " + serviceName<a name="line.774"></a>
+<span class="sourceLineNo">775</span>          + " already registered, rejecting request from " + instance);<a name="line.775"></a>
+<span class="sourceLineNo">776</span>      return false;<a name="line.776"></a>
 <span class="sourceLineNo">777</span>    }<a name="line.777"></a>
-<span class="sourceLineNo">778</span>    return true;<a name="line.778"></a>
-<span class="sourceLineNo">779</span>  }<a name="line.779"></a>
-<span class="sourceLineNo">780</span><a name="line.780"></a>
-<span class="sourceLineNo">781</span>  /**<a name="line.781"></a>
-<span class="sourceLineNo">782</span>   * Create a 'smarter' Connection, one that is capable of by-passing RPC if the request is to the<a name="line.782"></a>
-<span class="sourceLineNo">783</span>   * local server; i.e. a short-circuit Connection. Safe to use going to local or remote server.<a name="line.783"></a>
-<span class="sourceLineNo">784</span>   */<a name="line.784"></a>
-<span class="sourceLineNo">785</span>  private ClusterConnection createClusterConnection() throws IOException {<a name="line.785"></a>
-<span class="sourceLineNo">786</span>    Configuration conf = this.conf;<a name="line.786"></a>
-<span class="sourceLineNo">787</span>    if (conf.get(HConstants.CLIENT_ZOOKEEPER_QUORUM) != null) {<a name="line.787"></a>
-<span class="sourceLineNo">788</span>      // Use server ZK cluster for server-issued connections, so we clone<a name="line.788"></a>
-<span class="sourceLineNo">789</span>      // the conf and unset the client ZK related properties<a name="line.789"></a>
-<span class="sourceLineNo">790</span>      conf = new Configuration(this.conf);<a name="line.790"></a>
-<span class="sourceLineNo">791</span>      conf.unset(HConstants.CLIENT_ZOOKEEPER_QUORUM);<a name="line.791"></a>
-<span class="sourceLineNo">792</span>    }<a name="line.792"></a>
-<span class="sourceLineNo">793</span>    // Create a cluster connection that when appropriate, can short-circuit and go directly to the<a name="line.793"></a>
-<span class="sourceLineNo">794</span>    // local server if the request is to the local server bypassing RPC. Can be used for both local<a name="line.794"></a>
-<span class="sourceLineNo">795</span>    // and remote invocations.<a name="line.795"></a>
-<span class="sourceLineNo">796</span>    ClusterConnection conn = ConnectionUtils.createShortCircuitConnection(conf, null,<a name="line.796"></a>
-<span class="sourceLineNo">797</span>      userProvider.getCurrent(), serverName, rpcServices, rpcServices);<a name="line.797"></a>
-<span class="sourceLineNo">798</span>    // This is used to initialize the batch thread pool inside the connection implementation.<a name="line.798"></a>
-<span class="sourceLineNo">799</span>    // When deploy a fresh cluster, we may first use the cluster connection in InitMetaProcedure,<a name="line.799"></a>
-<span class="sourceLineNo">800</span>    // which will be executed inside the PEWorker, and then the batch thread pool will inherit the<a name="line.800"></a>
-<span class="sourceLineNo">801</span>    // thread group of PEWorker, which will be destroy when shutting down the ProcedureExecutor. It<a name="line.801"></a>
-<span class="sourceLineNo">802</span>    // will cause lots of procedure related UTs to fail, so here let's initialize it first, no harm.<a name="line.802"></a>
-<span class="sourceLineNo">803</span>    conn.getTable(TableName.META_TABLE_NAME).close();<a name="line.803"></a>
-<span class="sourceLineNo">804</span>    return conn;<a name="line.804"></a>
-<span class="sourceLineNo">805</span>  }<a name="line.805"></a>
-<span class="sourceLineNo">806</span><a name="line.806"></a>
-<span class="sourceLineNo">807</span>  /**<a name="line.807"></a>
-<span class="sourceLineNo">808</span>   * Run test on configured codecs to make sure supporting libs are in place.<a name="line.808"></a>
-<span class="sourceLineNo">809</span>   * @param c<a name="line.809"></a>
-<span class="sourceLineNo">810</span>   * @throws IOException<a name="line.810"></a>
-<span class="sourceLineNo">811</span>   */<a name="line.811"></a>
-<span class="sourceLineNo">812</span>  private static void checkCodecs(final Configuration c) throws IOException {<a name="line.812"></a>
-<span class="sourceLineNo">813</span>    // check to see if the codec list is available:<a name="line.813"></a>
-<span class="sourceLineNo">814</span>    String [] codecs = c.getStrings("hbase.regionserver.codecs", (String[])null);<a name="line.814"></a>
-<span class="sourceLineNo">815</span>    if (codecs == null) return;<a name="line.815"></a>
-<span class="sourceLineNo">816</span>    for (String codec : codecs) {<a name="line.816"></a>
-<span class="sourceLineNo">817</span>      if (!CompressionTest.testCompression(codec)) {<a name="line.817"></a>
-<span class="sourceLineNo">818</span>        throw new IOException("Compression codec " + codec +<a name="line.818"></a>
-<span class="sourceLineNo">819</span>          " not supported, aborting RS construction");<a name="line.819"></a>
-<span class="sourceLineNo">820</span>      }<a name="line.820"></a>
-<span class="sourceLineNo">821</span>    }<a name="line.821"></a>
-<span class="sourceLineNo">822</span>  }<a name="line.822"></a>
-<span class="sourceLineNo">823</span><a name="line.823"></a>
-<span class="sourceLineNo">824</span>  public String getClusterId() {<a name="line.824"></a>
-<span class="sourceLineNo">825</span>    return this.clusterId;<a name="line.825"></a>
-<span class="sourceLineNo">826</span>  }<a name="line.826"></a>
-<span class="sourceLineNo">827</span><a name="line.827"></a>
-<span class="sourceLineNo">828</span>  /**<a name="line.828"></a>
-<span class="sourceLineNo">829</span>   * Setup our cluster connection if not already initialized.<a name="line.829"></a>
-<span class="sourceLineNo">830</span>   */<a name="line.830"></a>
-<span class="sourceLineNo">831</span>  protected synchronized void setupClusterConnection() throws IOException {<a name="line.831"></a>
-<span class="sourceLineNo">832</span>    if (clusterConnection == null) {<a name="line.832"></a>
-<span class="sourceLineNo">833</span>      clusterConnection = createClusterConnection();<a name="line.833"></a>
-<span class="sourceLineNo">834</span>      metaTableLocator = new MetaTableLocator();<a name="line.834"></a>
-<span class="sourceLineNo">835</span>    }<a name="line.835"></a>
-<span class="sourceLineNo">836</span>  }<a name="line.836"></a>
-<span class="sourceLineNo">837</span><a name="line.837"></a>
-<span class="sourceLineNo">838</span>  /**<a name="line.838"></a>
-<span class="sourceLineNo">839</span>   * All initialization needed before we go register with Master.&lt;br&gt;<a name="line.839"></a>
-<span class="sourceLineNo">840</span>   * Do bare minimum. Do bulk of initializations AFTER we've connected to the Master.&lt;br&gt;<a name="line.840"></a>
-<span class="sourceLineNo">841</span>   * In here we just put up the RpcServer, setup Connection, and ZooKeeper.<a name="line.841"></a>
-<span class="sourceLineNo">842</span>   */<a name="line.842"></a>
-<span class="sourceLineNo">843</span>  private void preRegistrationInitialization() {<a name="line.843"></a>
-<span class="sourceLineNo">844</span>    try {<a name="line.844"></a>
-<span class="sourceLineNo">845</span>      initializeZooKeeper();<a name="line.845"></a>
-<span class="sourceLineNo">846</span>      setupClusterConnection();<a name="line.846"></a>
-<span class="sourceLineNo">847</span>      // Setup RPC client for master communication<a name="line.847"></a>
-<span class="sourceLineNo">848</span>      this.rpcClient = RpcClientFactory.createClient(conf, clusterId, new InetSocketAddress(<a name="line.848"></a>
-<span class="sourceLineNo">849</span>          this.rpcServices.isa.getAddress(), 0), clusterConnection.getConnectionMetrics());<a name="line.849"></a>
-<span class="sourceLineNo">850</span>    } catch (Throwable t) {<a name="line.850"></a>
-<span class="sourceLineNo">851</span>      // Call stop if error or process will stick around for ever since server<a name="line.851"></a>
-<span class="sourceLineNo">852</span>      // puts up non-daemon threads.<a name="line.852"></a>
-<span class="sourceLineNo">853</span>      this.rpcServices.stop();<a name="line.853"></a>
-<span class="sourceLineNo">854</span>      abort("Initialization of RS failed.  Hence aborting RS.", t);<a name="line.854"></a>
-<span class="sourceLineNo">855</span>    }<a name="line.855"></a>
-<span class="sourceLineNo">856</span>  }<a name="line.856"></a>
-<span class="sourceLineNo">857</span><a name="line.857"></a>
-<span class="sourceLineNo">858</span>  /**<a name="line.858"></a>
-<span class="sourceLineNo">859</span>   * Bring up connection to zk ensemble and then wait until a master for this cluster and then after<a name="line.859"></a>
-<span class="sourceLineNo">860</span>   * that, wait until cluster 'up' flag has been set. This is the order in which master does things.<a name="line.860"></a>
-<span class="sourceLineNo">861</span>   * &lt;p&gt;<a name="line.861"></a>
-<span class="sourceLineNo">862</span>   * Finally open long-living server short-circuit connection.<a name="line.862"></a>
-<span class="sourceLineNo">863</span>   */<a name="line.863"></a>
-<span class="sourceLineNo">864</span>  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="RV_RETURN_VALUE_IGNORED_BAD_PRACTICE",<a name="line.864"></a>
-<span class="sourceLineNo">865</span>    justification="cluster Id znode read would give us correct response")<a name="line.865"></a>
-<span class="sourceLineNo">866</span>  private void initializeZooKeeper() throws IOException, InterruptedException {<a name="line.866"></a>
-<span class="sourceLineNo">867</span>    // Nothing to do in here if no Master in the mix.<a name="line.867"></a>
-<span class="sourceLineNo">868</span>    if (this.masterless) {<a name="line.868"></a>
-<span class="sourceLineNo">869</span>      return;<a name="line.869"></a>
-<span class="sourceLineNo">870</span>    }<a name="line.870"></a>
-<span class="sourceLineNo">871</span><a name="line.871"></a>
-<span class="sourceLineNo">872</span>    // Create the master address tracker, register with zk, and start it.  Then<a name="line.872"></a>
-<span class="sourceLineNo">873</span>    // block until a master is available.  No point in starting up if no master<a name="line.873"></a>
-<span class="sourceLineNo">874</span>    // running.<a name="line.874"></a>
-<span class="sourceLineNo">875</span>    blockAndCheckIfStopped(this.masterAddressTracker);<a name="line.875"></a>
+<span class="sourceLineNo">778</span><a name="line.778"></a>
+<span class="sourceLineNo">779</span>    coprocessorServiceHandlers.put(serviceName, instance);<a name="line.779"></a>
+<span class="sourceLineNo">780</span>    if (LOG.isDebugEnabled()) {<a name="line.780"></a>
+<span class="sourceLineNo">781</span>      LOG.debug("Registered regionserver coprocessor executorService: executorService=" + serviceName);<a name="line.781"></a>
+<span class="sourceLineNo">782</span>    }<a name="line.782"></a>
+<span class="sourceLineNo">783</span>    return true;<a name="line.783"></a>
+<span class="sourceLineNo">784</span>  }<a name="line.784"></a>
+<span class="sourceLineNo">785</span><a name="line.785"></a>
+<span class="sourceLineNo">786</span>  /**<a name="line.786"></a>
+<span class="sourceLineNo">787</span>   * Create a 'smarter' Connection, one that is capable of by-passing RPC if the request is to the<a name="line.787"></a>
+<span class="sourceLineNo">788</span>   * local server; i.e. a short-circuit Connection. Safe to use going to local or remote server.<a name="line.788"></a>
+<span class="sourceLineNo">789</span>   */<a name="line.789"></a>
+<span class="sourceLineNo">790</span>  private ClusterConnection createClusterConnection() throws IOException {<a name="line.790"></a>
+<span class="sourceLineNo">791</span>    Configuration conf = this.conf;<a name="line.791"></a>
+<span class="sourceLineNo">792</span>    if (conf.get(HConstants.CLIENT_ZOOKEEPER_QUORUM) != null) {<a name="line.792"></a>
+<span class="sourceLineNo">793</span>      // Use server ZK cluster for server-issued connections, so we clone<a name="line.793"></a>
+<span class="sourceLineNo">794</span>      // the conf and unset the client ZK related properties<a name="line.794"></a>
+<span class="sourceLineNo">795</span>      conf = new Configuration(this.conf);<a name="line.795"></a>
+<span class="sourceLineNo">796</span>      conf.unset(HConstants.CLIENT_ZOOKEEPER_QUORUM);<a name="line.796"></a>
+<span class="sourceLineNo">797</span>    }<a name="line.797"></a>
+<span class="sourceLineNo">798</span>    // Create a cluster connection that when appropriate, can short-circuit and go directly to the<a name="line.798"></a>
+<span class="sourceLineNo">799</span>    // local server if the request is to the local server bypassing RPC. Can be used for both local<a name="line.799"></a>
+<span class="sourceLineNo">800</span>    // and remote invocations.<a name="line.800"></a>
+<span class="sourceLineNo">801</span>    ClusterConnection conn = ConnectionUtils.createShortCircuitConnection(conf, null,<a name="line.801"></a>
+<span class="sourceLineNo">802</span>      userProvider.getCurrent(), serverName, rpcServices, rpcServices);<a name="line.802"></a>
+<span class="sourceLineNo">803</span>    // This is used to initialize the batch thread pool inside the connection implementation.<a name="line.803"></a>
+<span class="sourceLineNo">804</span>    // When deploy a fresh cluster, we may first use the cluster connection in InitMetaProcedure,<a name="line.804"></a>
+<span class="sourceLineNo">805</span>    // which will be executed inside the PEWorker, and then the batch thread pool will inherit the<a name="line.805"></a>
+<span class="sourceLineNo">806</span>    // thread group of PEWorker, which will be destroy when shutting down the ProcedureExecutor. It<a name="line.806"></a>
+<span class="sourceLineNo">807</span>    // will cause lots of procedure related UTs to fail, so here let's initialize it first, no harm.<a name="line.807"></a>
+<span class="sourceLineNo">808</span>    conn.getTable(TableName.META_TABLE_NAME).close();<a name="line.808"></a>
+<span class="sourceLineNo">809</span>    return conn;<a name="line.809"></a>
+<span class="sourceLineNo">810</span>  }<a name="line.810"></a>
+<span class="sourceLineNo">811</span><a name="line.811"></a>
+<span class="sourceLineNo">812</span>  /**<a name="line.812"></a>
+<span class="sourceLineNo">813</span>   * Run test on configured codecs to make sure supporting libs are in place.<a name="line.813"></a>
+<span class="sourceLineNo">814</span>   * @param c<a name="line.814"></a>
+<span class="sourceLineNo">815</span>   * @throws IOException<a name="line.815"></a>
+<span class="sourceLineNo">816</span>   */<a name="line.816"></a>
+<span class="sourceLineNo">817</span>  private static void checkCodecs(final Configuration c) throws IOException {<a name="line.817"></a>
+<span class="sourceLineNo">818</span>    // check to see if the codec list is available:<a name="line.818"></a>
+<span class="sourceLineNo">819</span>    String [] codecs = c.getStrings("hbase.regionserver.codecs", (String[])null);<a name="line.819"></a>
+<span class="sourceLineNo">820</span>    if (codecs == null) return;<a name="line.820"></a>
+<span class="sourceLineNo">821</span>    for (String codec : codecs) {<a name="line.821"></a>
+<span class="sourceLineNo">822</span>      if (!CompressionTest.testCompression(codec)) {<a name="line.822"></a>
+<span class="sourceLineNo">823</span>        throw new IOException("Compression codec " + codec +<a name="line.823"></a>
+<span class="sourceLineNo">824</span>          " not supported, aborting RS construction");<a name="line.824"></a>
+<span class="sourceLineNo">825</span>      }<a name="line.825"></a>
+<span class="sourceLineNo">826</span>    }<a name="line.826"></a>
+<span class="sourceLineNo">827</span>  }<a name="line.827"></a>
+<span class="sourceLineNo">828</span><a name="line.828"></a>
+<span class="sourceLineNo">829</span>  public String getClusterId() {<a name="line.829"></a>
+<span class="sourceLineNo">830</span>    return this.clusterId;<a name="line.830"></a>
+<span class="sourceLineNo">831</span>  }<a name="line.831"></a>
+<span class="sourceLineNo">832</span><a name="line.832"></a>
+<span class="sourceLineNo">833</span>  /**<a name="line.833"></a>
+<span class="sourceLineNo">834</span>   * Setup our cluster connection if not already initialized.<a name="line.834"></a>
+<span class="sourceLineNo">835</span>   */<a name="line.835"></a>
+<span class="sourceLineNo">836</span>  protected synchronized void setupClusterConnection() throws IOException {<a name="line.836"></a>
+<span class="sourceLineNo">837</span>    if (clusterConnection == null) {<a name="line.837"></a>
+<span class="sourceLineNo">838</span>      clusterConnection = createClusterConnection();<a name="line.838"></a>
+<span class="sourceLineNo">839</span>      metaTableLocator = new MetaTableLocator();<a name="line.839"></a>
+<span class="sourceLineNo">840</span>    }<a name="line.840"></a>
+<span class="sourceLineNo">841</span>  }<a name="line.841"></a>
+<span class="sourceLineNo">842</span><a name="line.842"></a>
+<span class="sourceLineNo">843</span>  /**<a name="line.843"></a>
+<span class="sourceLineNo">844</span>   * All initialization needed before we go register with Master.&lt;br&gt;<a name="line.844"></a>
+<span class="sourceLineNo">845</span>   * Do bare minimum. Do bulk of initializations AFTER we've connected to the Master.&lt;br&gt;<a name="line.845"></a>
+<span class="sourceLineNo">846</span>   * In here we just put up the RpcServer, setup Connection, and ZooKeeper.<a name="line.846"></a>
+<span class="sourceLineNo">847</span>   */<a name="line.847"></a>
+<span class="sourceLineNo">848</span>  private void preRegistrationInitialization() {<a name="line.848"></a>
+<span class="sourceLineNo">849</span>    try {<a name="line.849"></a>
+<span class="sourceLineNo">850</span>      initializeZooKeeper();<a name="line.850"></a>
+<span class="sourceLineNo">851</span>      setupClusterConnection();<a name="line.851"></a>
+<span class="sourceLineNo">852</span>      // Setup RPC client for master communication<a name="line.852"></a>
+<span class="sourceLineNo">853</span>      this.rpcClient = RpcClientFactory.createClient(conf, clusterId, new InetSocketAddress(<a name="line.853"></a>
+<span class="sourceLineNo">854</span>          this.rpcServices.isa.getAddress(), 0), clusterConnection.getConnectionMetrics());<a name="line.854"></a>
+<span class="sourceLineNo">855</span>    } catch (Throwable t) {<a name="line.855"></a>
+<span class="sourceLineNo">856</span>      // Call stop if error or process will stick around for ever since server<a name="line.856"></a>
+<span class="sourceLineNo">857</span>      // puts up non-daemon threads.<a name="line.857"></a>
+<span class="sourceLineNo">858</span>      this.rpcServices.stop();<a name="line.858"></a>
+<span class="sourceLineNo">859</span>      abort("Initialization of RS failed.  Hence aborting RS.", t);<a name="line.859"></a>
+<span class="sourceLineNo">860</span>    }<a name="line.860"></a>
+<span class="sourceLineNo">861</span>  }<a name="line.861"></a>
+<span class="sourceLineNo">862</span><a name="line.862"></a>
+<span class="sourceLineNo">863</span>  /**<a name="line.863"></a>
+<span class="sourceLineNo">864</span>   * Bring up connection to zk ensemble and then wait until a master for this cluster and then after<a name="line.864"></a>
+<span class="sourceLineNo">865</span>   * that, wait until cluster 'up' flag has been set. This is the order in which master does things.<a name="line.865"></a>
+<span class="sourceLineNo">866</span>   * &lt;p&gt;<a name="line.866"></a>
+<span class="sourceLineNo">867</span>   * Finally open long-living server short-circuit connection.<a name="line.867"></a>
+<span class="sourceLineNo">868</span>   */<a name="line.868"></a>
+<span class="sourceLineNo">869</span>  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="RV_RETURN_VALUE_IGNORED_BAD_PRACTICE",<a name="line.869"></a>
+<span class="sourceLineNo">870</span>    justification="cluster Id znode read would give us correct response")<a name="line.870"></a>
+<span class="sourceLineNo">871</span>  private void initializeZooKeeper() throws IOException, InterruptedException {<a name="line.871"></a>
+<span class="sourceLineNo">872</span>    // Nothing to do in here if no Master in the mix.<a name="line.872"></a>
+<span class="sourceLineNo">873</span>    if (this.masterless) {<a name="line.873"></a>
+<span class="sourceLineNo">874</span>      return;<a name="line.874"></a>
+<span class="sourceLineNo">875</span>    }<a name="line.875"></a>
 <span class="sourceLineNo">876</span><a name="line.876"></a>
-<span class="sourceLineNo">877</span>    // Wait on cluster being up.  Master will set this flag up in zookeeper<a name="line.877"></a>
-<span class="sourceLineNo">878</span>    // when ready.<a name="line.878"></a>
-<span class="sourceLineNo">879</span>    blockAndCheckIfStopped(this.clusterStatusTracker);<a name="line.879"></a>
-<span class="sourceLineNo">880</span><a name="line.880"></a>
-<span class="sourceLineNo">881</span>    // If we are HMaster then the cluster id should have already been set.<a name="line.881"></a>
-<span class="sourceLineNo">882</span>    if (clusterId == null) {<a name="line.882"></a>
-<span class="sourceLineNo">883</span>      // Retrieve clusterId<a name="line.883"></a>
-<span class="sourceLineNo">884</span>      // Since cluster status is now up<a name="line.884"></a>
-<span class="sourceLineNo">885</span>      // ID should have already been set by HMaster<a name="line.885"></a>
-<span class="sourceLineNo">886</span>      try {<a name="line.886"></a>
-<span class="sourceLineNo">887</span>        clusterId = ZKClusterId.readClusterIdZNode(this.zooKeeper);<a name="line.887"></a>
-<span class="sourceLineNo">888</span>        if (clusterId == null) {<a name="line.888"></a>
-<span class="sourceLineNo">889</span>          this.abort("Cluster ID has not been set");<a name="line.889"></a>
-<span class="sourceLineNo">890</span>        }<a name="line.890"></a>
-<span class="sourceLineNo">891</span>        LOG.info("ClusterId : " + clusterId);<a name="line.891"></a>
-<span class="sourceLineNo">892</span>      } catch (KeeperException e) {<a name="line.892"></a>
-<span class="sourceLineNo">893</span>        this.abort("Failed to retrieve Cluster ID", e);<a name="line.893"></a>
-<span class="sourceLineNo">894</span>      }<a name="line.894"></a>
-<span class="sourceLineNo">895</span>    }<a name="line.895"></a>
-<span class="sourceLineNo">896</span><a name="line.896"></a>
-<span class="sourceLineNo">897</span>    waitForMasterActive();<a name="line.897"></a>
-<span class="sourceLineNo">898</span>    if (isStopped() || isAborted()) {<a name="line.898"></a>
-<span class="sourceLineNo">899</span>      return; // No need for further initialization<a name="line.899"></a>
+<span class="sourceLineNo">877</span>    // Create the master address tracker, register with zk, and start it.  Then<a name="line.877"></a>
+<span class="sourceLineNo">878</span>    // block until a master is available.  No point in starting up if no master<a name="line.878"></a>
+<span class="sourceLineNo">879</span>    // running.<a name="line.879"></a>
+<span class="sourceLineNo">880</span>    blockAndCheckIfStopped(this.masterAddressTracker);<a name="line.880"></a>
+<span class="sourceLineNo">881</span><a name="line.881"></a>
+<span class="sourceLineNo">882</span>    // Wait on cluster being up.  Master will set this flag up in zookeeper<a name="line.882"></a>
+<span class="sourceLineNo">883</span>    // when ready.<a name="line.883"></a>
+<span class="sourceLineNo">884</span>    blockAndCheckIfStopped(this.clusterStatusTracker);<a name="line.884"></a>
+<span class="sourceLineNo">885</span><a name="line.885"></a>
+<span class="sourceLineNo">886</span>    // If we are HMaster then the cluster id should have already been set.<a name="line.886"></a>
+<span class="sourceLineNo">887</span>    if (clusterId == null) {<a name="line.887"></a>
+<span class="sourceLineNo">888</span>      // Retrieve clusterId<a name="line.888"></a>
+<span class="sourceLineNo">889</span>      // Since cluster status is now up<a name="line.889"></a>
+<span class="sourceLineNo">890</span>      // ID should have already been set by HMaster<a name="line.890"></a>
+<span class="sourceLineNo">891</span>      try {<a name="line.891"></a>
+<span class="sourceLineNo">892</span>        clusterId = ZKClusterId.readClusterIdZNode(this.zooKeeper);<a name="line.892"></a>
+<span class="sourceLineNo">893</span>        if (clusterId == null) {<a name="line.893"></a>
+<span class="sourceLineNo">894</span>          this.abort("Cluster ID has not been set");<a name="line.894"></a>
+<span class="sourceLineNo">895</span>        }<a name="line.895"></a>
+<span class="sourceLineNo">896</span>        LOG.info("ClusterId : " + clusterId);<a name="line.896"></a>
+<span class="sourceLineNo">897</span>      } catch (KeeperException e) {<a name="line.897"></a>
+<span class="sourceLineNo">898</span>        this.abort("Failed to retrieve Cluster ID", e);<a name="line.898"></a>
+<span class="sourceLineNo">899</span>      }<a name="line.899"></a>
 <span class="sourceLineNo">900</span>    }<a name="line.900"></a>
 <span class="sourceLineNo">901</span><a name="line.901"></a>
-<span class="sourceLineNo">902</span>    // watch for snapshots and other procedures<a name="line.902"></a>
-<span class="sourceLineNo">903</span>    try {<a name="line.903"></a>
-<span class="sourceLineNo">904</span>      rspmHost = new RegionServerProcedureManagerHost();<a name="line.904"></a>
-<span class="sourceLineNo">905</span>      rspmHost.loadProcedures(conf);<a name="line.905"></a>
-<span class="sourceLineNo">906</span>      rspmHost.initialize(this);<a name="line.906"></a>
-<span class="sourceLineNo">907</span>    } catch (KeeperException e) {<a name="line.907"></a>
-<span class="sourceLineNo">908</span>      this.abort("Failed to reach coordination cluster when creating procedure handler.", e);<a name="line.908"></a>
-<span class="sourceLineNo">909</span>    }<a name="line.909"></a>
-<span class="sourceLineNo">910</span>  }<a name="line.910"></a>
-<span class="sourceLineNo">911</span><a name="line.911"></a>
-<span class="sourceLineNo">912</span>  /**<a name="line.912"></a>
-<span class="sourceLineNo">913</span>   * Utilty method to wait indefinitely on a znode availability while checking<a name="line.913"></a>
-<span class="sourceLineNo">914</span>   * if the region server is shut down<a name="line.914"></a>
-<span class="sourceLineNo">915</span>   * @param tracker znode tracker to use<a name="line.915"></a>
-<span class="sourceLineNo">916</span>   * @throws IOException any IO exception, plus if the RS is stopped<a name="line.916"></a>
-<span class="sourceLineNo">917</span>   * @throws InterruptedException<a name="line.917"></a>
-<span class="sourceLineNo">918</span>   */<a name="line.918"></a>
-<span class="sourceLineNo">919</span>  private void blockAndCheckIfStopped(ZKNodeTracker tracker)<a name="line.919"></a>
-<span class="sourceLineNo">920</span>      throws IOException, InterruptedException {<a name="line.920"></a>
-<span class="sourceLineNo">921</span>    while (tracker.blockUntilAvailable(this.msgInterval, false) == null) {<a name="line.921"></a>
-<span class="sourceLineNo">922</span>      if (this.stopped) {<a name="line.922"></a>
-<span class="sourceLineNo">923</span>        throw new IOException("Received the shutdown message while waiting.");<a name="line.923"></a>
-<span class="sourceLineNo">924</span>      }<a name="line.924"></a>
-<span class="sourceLineNo">925</span>    }<a name="line.925"></a>
-<span class="sourceLineNo">926</span>  }<a name="line.926"></a>
-<span class="sourceLineNo">927</span><a name="line.927"></a>
-<span class="sourceLineNo">928</span>  /**<a name="line.928"></a>
-<span class="sourceLineNo">929</span>   * @return True if the cluster is up.<a name="line.929"></a>
-<span class="sourceLineNo">930</span>   */<a name="line.930"></a>
-<span class="sourceLineNo">931</span>  @Override<a name="line.931"></a>
-<span class="sourceLineNo">932</span>  public boolean isClusterUp() {<a name="line.932"></a>
-<span class="sourceLineNo">933</span>    return this.masterless ||<a name="line.933"></a>
-<span class="sourceLineNo">934</span>        (this.clusterStatusTracker != null &amp;&amp; this.clusterStatusTracker.isClusterUp());<a name="line.934"></a>
-<span class="sourceLineNo">935</span>  }<a name="line.935"></a>
-<span class="sourceLineNo">936</span><a name="line.936"></a>
-<span class="sourceLineNo">937</span>  /**<a name="line.937"></a>
-<span class="sourceLineNo">938</span>   * The HRegionServer sticks in this loop until closed.<a name="line.938"></a>
-<span class="sourceLineNo">939</span>   */<a name="line.939"></a>
-<span class="sourceLineNo">940</span>  @Override<a name="line.940"></a>
-<span class="sourceLineNo">941</span>  public void run() {<a name="line.941"></a>
-<span class="sourceLineNo">942</span>    try {<a name="line.942"></a>
-<span class="sourceLineNo">943</span>      // Do pre-registration initializations; zookeeper, lease threads, etc.<a name="line.943"></a>
-<span class="sourceLineNo">944</span>      preRegistrationInitialization();<a name="line.944"></a>
-<span class="sourceLineNo">945</span>    } catch (Throwable e) {<a name="line.945"></a>
-<span class="sourceLineNo">946</span>      abort("Fatal exception during initialization", e);<a name="line.946"></a>
-<span class="sourceLineNo">947</span>    }<a name="line.947"></a>
-<span class="sourceLineNo">948</span><a name="line.948"></a>
-<span class="sourceLineNo">949</span>    try {<a name="line.949"></a>
-<span class="sourceLineNo">950</span>      if (!isStopped() &amp;&amp; !isAborted()) {<a name="line.950"></a>
-<span class="sourceLineNo">951</span>        ShutdownHook.install(conf, fs, this, Thread.currentThread());<a name="line.951"></a>
-<span class="sourceLineNo">952</span>        // Initialize the RegionServerCoprocessorHost now that our ephemeral<a name="line.952"></a>
-<span class="sourceLineNo">953</span>        // node was created, in case any coprocessors want to use ZooKeeper<a name="line.953"></a>
-<span class="sourceLineNo">954</span>        this.rsHost = new RegionServerCoprocessorHost(this, this.conf);<a name="line.954"></a>
-<span class="sourceLineNo">955</span>      }<a name="line.955"></a>
-<span class="sourceLineNo">956</span><a name="line.956"></a>
-<span class="sourceLineNo">957</span>      // Try and register with the Master; tell it we are here.  Break if server is stopped or the<a name="line.957"></a>
-<span class="sourceLineNo">958</span>      // clusterup flag is down or hdfs went wacky. Once registered successfully, go ahead and start<a name="line.958"></a>
-<span class="sourceLineNo">959</span>      // up all Services. Use RetryCounter to get backoff in case Master is struggling to come up.<a name="line.959"></a>
-<span class="sourceLineNo">960</span>      LOG.debug("About to register with Master.");<a name="line.960"></a>
-<span class="sourceLineNo">961</span>      RetryCounterFactory rcf = new RetryCounterFactory(Integer.MAX_VALUE,<a name="line.961"></a>
-<span class="sourceLineNo">962</span>          this.sleeper.getPeriod(), 1000 * 60 * 5);<a name="line.962"></a>
-<span class="sourceLineNo">963</span>      RetryCounter rc = rcf.create();<a name="line.963"></a>
-<span class="sourceLineNo">964</span>      while (keepLooping()) {<a name="line.964"></a>
-<span class="sourceLineNo">965</span>        RegionServerStartupResponse w = reportForDuty();<a name="line.965"></a>
-<span class="sourceLineNo">966</span>        if (w == null) {<a name="line.966"></a>
-<span class="sourceLineNo">967</span>          long sleepTime = rc.getBackoffTimeAndIncrementAttempts();<a name="line.967"></a>
-<span class="sourceLineNo">968</span>          LOG.warn("reportForDuty failed; sleeping {} ms and then retrying.", sleepTime);<a name="line.968"></a>
-<span class="sourceLineNo">969</span>          this.sleeper.sleep(sleepTime);<a name="line.969"></a>
-<span class="sourceLineNo">970</span>        } else {<a name="line.970"></a>
-<span class="sourceLineNo">971</span>          handleReportForDutyResponse(w);<a name="line.971"></a>
-<span class="sourceLineNo">972</span>          break;<a name="line.972"></a>
-<span class="sourceLineNo">973</span>        }<a name="line.973"></a>
-<span class="sourceLineNo">974</span>      }<a name="line.974"></a>
-<span class="sourceLineNo">975</span><a name="line.975"></a>
-<span class="sourceLineNo">976</span>      if (!isStopped() &amp;&amp; isHealthy()) {<a name="line.976"></a>
-<span class="sourceLineNo">977</span>        // start the snapshot handler and other procedure handlers,<a name="line.977"></a>
-<span class="sourceLineNo">978</span>        // since the server is ready to run<a name="line.978"></a>
-<span class="sourceLineNo">979</span>        if (this.rspmHost != null) {<a name="line.979"></a>
-<span class="sourceLineNo">980</span>          this.rspmHost.start();<a name="line.980"></a>
-<span class="sourceLineNo">981</span>        }<a name="line.981"></a>
-<span class="sourceLineNo">982</span>        // Start the Quota Manager<a name="line.982"></a>
-<span class="sourceLineNo">983</span>        if (this.rsQuotaManager != null) {<a name="line.983"></a>
-<span class="sourceLineNo">984</span>          rsQuotaManager.start(getRpcServer().getScheduler());<a name="line.984"></a>
-<span class="sourceLineNo">985</span>        }<a name="line.985"></a>
-<span class="sourceLineNo">986</span>        if (this.rsSpaceQuotaManager != null) {<a name="line.986"></a>
-<span class="sourceLineNo">987</span>          this.rsSpaceQuotaManager.start();<a name="line.987"></a>
-<span class="sourceLineNo">988</span>        }<a name="line.988"></a>
-<span class="sourceLineNo">989</span>      }<a name="line.989"></a>
-<span class="sourceLineNo">990</span><a name="line.990"></a>
-<span class="sourceLineNo">991</span>      // We registered with the Master.  Go into run mode.<a name="line.991"></a>
-<span class="sourceLineNo">992</span>      long lastMsg = System.currentTimeMillis();<a name="line.992"></a>
-<span class="sourceLineNo">993</span>      long oldRequestCount = -1;<a name="line.993"></a>
-<span class="sourceLineNo">994</span>      // The main run loop.<a name="line.994"></a>
-<span class="sourceLineNo">995</span>      while (!isStopped() &amp;&amp; isHealthy()) {<a name="line.995"></a>
-<span class="sourceLineNo">996</span>        if (!isClusterUp()) {<a name="line.996"></a>
-<span class="sourceLineNo">997</span>          if (isOnlineRegionsEmpty()) {<a name="line.997"></a>
-<span class="sourceLineNo">998</span>            stop("Exiting; cluster shutdown set and not carrying any regions");<a name="line.998"></a>
-<span class="sourceLineNo">999</span>          } else if (!this.stopping) {<a name="line.999"></a>
-<span class="sourceLineNo">1000</span>            this.stopping = true;<a name="line.1000"></a>
-<span class="sourceLineNo">1001</span>            LOG.info("Closing user regions");<a name="line.1001"></a>
-<span class="sourceLineNo">1002</span>            closeUserRegions(this.abortRequested);<a name="line.1002"></a>
-<span class="sourceLineNo">1003</span>          } else if (this.stopping) {<a name="line.1003"></a>
-<span class="sourceLineNo">1004</span>            boolean allUserRegionsOffline = areAllUserRegionsOffline();<a name="line.1004"></a>
-<span class="sourceLineNo">1005</span>            if (allUserRegionsOffline) {<a name="line.1005"></a>
-<span class="sourceLineNo">1006</span>              // Set stopped if no more write requests tp meta tables<a name="line.1006"></a>
-<span class="sourceLineNo">1007</span>              // since last time we went around the loop.  Any open<a name="line.1007"></a>
-<span class="sourceLineNo">1008</span>              // meta regions will be closed on our way out.<a name="line.1008"></a>
-<span class="sourceLineNo">1009</span>              if (oldRequestCount == getWriteRequestCount()) {<a name="line.1009"></a>
-<span class="sourceLineNo">1010</span>                stop("Stopped; only catalog regions remaining online");<a name="line.1010"></a>
-<span class="sourceLineNo">1011</span>                break;<a name="line.1011"></a>
-<span class="sourceLineNo">1012</span>              }<a name="line.1012"></a>
-<span class="sourceLineNo">1013</span>              oldRequestCount = getWriteRequestCount();<a name="line.1013"></a>
-<span class="sourceLineNo">1014</span>            } else {<a name="line.1014"></a>
-<span class="sourceLineNo">1015</span>              // Make sure all regions have been closed -- some regions may<a name="line.1015"></a>
-<span class="sourceLineNo">1016</span>              // have not got it because we were splitting at the time of<a name="line.1016"></a>
-<span class="sourceLineNo">1017</span>              // the call to closeUserRegions.<a name="line.1017"></a>
-<span class="sourceLineNo">1018</span>              closeUserRegions(this.abortRequested);<a name="line.1018"></a>
-<span class="sourceLineNo">1019</span>            }<a name="line.1019"></a>
-<span class="sourceLineNo">1020</span>            LOG.debug("Waiting on " + getOnlineRegionsAsPrintableString());<a name="line.1020"></a>
-<span class="sourceLineNo">1021</span>          }<a name="line.1021"></a>
-<span class="sourceLineNo">1022</span>        }<a name="line.1022"></a>
-<span class="sourceLineNo">1023</span>        long now = System.currentTimeMillis();<a name="line.1023"></a>
-<span class="sourceLineNo">1024</span>        if ((now - lastMsg) &gt;= msgInterval) {<a name="line.1024"></a>
-<span class="sourceLineNo">1025</span>          tryRegionServerReport(lastMsg, now);<a name="line.1025"></a>
-<span class="sourceLineNo">1026</span>          lastMsg = System.currentTimeMillis();<a name="line.1026"></a>
+<span class="sourceLineNo">902</span>    waitForMasterActive();<a name="line.902"></a>
+<span class="sourceLineNo">903</span>    if (isStopped() || isAborted()) {<a name="line.903"></a>
+<span class="sourceLineNo">904</span>      return; // No need for further initialization<a name="line.904"></a>
+<span class="sourceLineNo">905</span>    }<a name="line.905"></a>
+<span class="sourceLineNo">906</span><a name="line.906"></a>
+<span class="sourceLineNo">907</span>    // watch for snapshots and other procedures<a name="line.907"></a>
+<span class="sourceLineNo">908</span>    try {<a name="line.908"></a>
+<span class="sourceLineNo">909</span>      rspmHost = new RegionServerProcedureManagerHost();<a name="line.909"></a>
+<span class="sourceLineNo">910</span>      rspmHost.loadProcedures(conf);<a name="line.910"></a>
+<span class="sourceLineNo">911</span>      rspmHost.initialize(this);<a name="line.911"></a>
+<span class="sourceLineNo">912</span>    } catch (KeeperException e) {<a name="line.912"></a>
+<span class="sourceLineNo">913</span>      this.abort("Failed to reach coordination cluster when creating procedure handler.", e);<a name="line.913"></a>
+<span class="sourceLineNo">914</span>    }<a name="line.914"></a>
+<span class="sourceLineNo">915</span>  }<a name="line.915"></a>
+<span class="sourceLineNo">916</span><a name="line.916"></a>
+<span class="sourceLineNo">917</span>  /**<a name="line.917"></a>
+<span class="sourceLineNo">918</span>   * Utilty method to wait indefinitely on a znode availability while checking<a name="line.918"></a>
+<span class="sourceLineNo">919</span>   * if the region server is shut down<a name="line.919"></a>
+<span class="sourceLineNo">920</span>   * @param tracker znode tracker to use<a name="line.920"></a>
+<span class="sourceLineNo">921</span>   * @throws IOException any IO exception, plus if the RS is stopped<a name="line.921"></a>
+<span class="sourceLineNo">922</span>   * @throws InterruptedException<a name="line.922"></a>
+<span class="sourceLineNo">923</span>   */<a name="line.923"></a>
+<span class="sourceLineNo">924</span>  private void blockAndCheckIfStopped(ZKNodeTracker tracker)<a name="line.924"></a>
+<span class="sourceLineNo">925</span>      throws IOException, InterruptedException {<a name="line.925"></a>
+<span class="sourceLineNo">926</span>    while (tracker.blockUntilAvailable(this.msgInterval, false) == null) {<a name="line.926"></a>
+<span class="sourceLineNo">927</span>      if (this.stopped) {<a name="line.927"></a>
+<span class="sourceLineNo">928</span>        throw new IOException("Received the shutdown message while waiting.");<a name="line.928"></a>
+<span class="sourceLineNo">929</span>      }<a name="line.929"></a>
+<span class="sourceLineNo">930</span>    }<a name="line.930"></a>
+<span class="sourceLineNo">931</span>  }<a name="line.931"></a>
+<span class="sourceLineNo">932</span><a name="line.932"></a>
+<span class="sourceLineNo">933</span>  /**<a name="line.933"></a>
+<span class="sourceLineNo">934</span>   * @return True if the cluster is up.<a name="line.934"></a>
+<span class="sourceLineNo">935</span>   */<a name="line.935"></a>
+<span class="sourceLineNo">936</span>  @Override<a name="line.936"></a>
+<span class="sourceLineNo">937</span>  public boolean isClusterUp() {<a name="line.937"></a>
+<span class="sourceLineNo">938</span>    return this.masterless ||<a name="line.938"></a>
+<span class="sourceLineNo">939</span>        (this.clusterStatusTracker != null &amp;&amp; this.clusterStatusTracker.isClusterUp());<a name="line.939"></a>
+<span class="sourceLineNo">940</span>  }<a name="line.940"></a>
+<span class="sourceLineNo">941</span><a name="line.941"></a>
+<span class="sourceLineNo">942</span>  /**<a name="line.942"></a>
+<span class="sourceLineNo">943</span>   * The HRegionServer sticks in this loop until closed.<a name="line.943"></a>
+<span class="sourceLineNo">944</span>   */<a name="line.944"></a>
+<span class="sourceLineNo">945</span>  @Override<a name="line.945"></a>
+<span class="sourceLineNo">946</span>  public void run() {<a name="line.946"></a>
+<span class="sourceLineNo">947</span>    try {<a name="line.947"></a>
+<span class="sourceLineNo">948</span>      // Do pre-registration initializations; zookeeper, lease threads, etc.<a name="line.948"></a>
+<span class="sourceLineNo">949</span>      preRegistrationInitialization();<a name="line.949"></a>
+<span class="sourceLineNo">950</span>    } catch (Throwable e) {<a name="line.950"></a>
+<span class="sourceLineNo">951</span>      abort("Fatal exception during initialization", e);<a name="line.951"></a>
+<span class="sourceLineNo">952</span>    }<a name="line.952"></a>
+<span class="sourceLineNo">953</span><a name="line.953"></a>
+<span class="sourceLineNo">954</span>    try {<a name="line.954"></a>
+<span class="sourceLineNo">955</span>      if (!isStopped() &amp;&amp; !isAborted()) {<a name="line.955"></a>
+<span class="sourceLineNo">956</span>        ShutdownHook.install(conf, fs, this, Thread.currentThread());<a name="line.956"></a>
+<span class="sourceLineNo">957</span>        // Initialize the RegionServerCoprocessorHost now that our ephemeral<a name="line.957"></a>
+<span class="sourceLineNo">958</span>        // node was created, in case any coprocessors want to use ZooKeeper<a name="line.958"></a>
+<span class="sourceLineNo">959</span>        this.rsHost = new RegionServerCoprocessorHost(this, this.conf);<a name="line.959"></a>
+<span class="sourceLineNo">960</span>      }<a name="line.960"></a>
+<span class="sourceLineNo">961</span><a name="line.961"></a>
+<span class="sourceLineNo">962</span>      // Try and register with the Master; tell it we are here.  Break if server is stopped or the<a name="line.962"></a>
+<span class="sourceLineNo">963</span>      // clusterup flag is down or hdfs went wacky. Once registered successfully, go ahead and start<a name="line.963"></a>
+<span class="sourceLineNo">964</span>      // up all Services. Use RetryCounter to get backoff in case Master is struggling to come up.<a name="line.964"></a>
+<span class="sourceLineNo">965</span>      LOG.debug("About to register with Master.");<a name="line.965"></a>
+<span class="sourceLineNo">966</span>      RetryCounterFactory rcf = new RetryCounterFactory(Integer.MAX_VALUE,<a name="line.966"></a>
+<span class="sourceLineNo">967</span>          this.sleeper.getPeriod(), 1000 * 60 * 5);<a name="line.967"></a>
+<span class="sourceLineNo">968</span>      RetryCounter rc = rcf.create();<a name="line.968"></a>
+<span class="sourceLineNo">969</span>      while (keepLooping()) {<a name="line.969"></a>
+<span class="sourceLineNo">970</span>        RegionServerStartupResponse w = reportForDuty();<a name="line.970"></a>
+<span class="sourceLineNo">971</span>        if (w == null) {<a name="line.971"></a>
+<span class="sourceLineNo">972</span>          long sleepTime = rc.getBackoffTimeAndIncrementAttempts();<a name="line.972"></a>
+<span class="sourceLineNo">973</span>          LOG.warn("reportForDuty failed; sleeping {} ms and then retrying.", sleepTime);<a name="line.973"></a>
+<span class="sourceLineNo">974</span>          this.sleeper.sleep(sleepTime);<a name="line.974"></a>
+<span class="sourceLineNo">975</span>        } else {<a name="line.975"></a>
+<span class="sourceLineNo">976</span>          handleReportForDutyResponse(w);<a name="line.976"></a>
+<span class="sourceLineNo">977</span>          break;<a name="line.977"></a>
+<span class="sourceLineNo">978</span>        }<a name="line.978"></a>
+<span class="sourceLineNo">979</span>      }<a name="line.979"></a>
+<span class="sourceLineNo">980</span><a name="line.980"></a>
+<span class="sourceLineNo">981</span>      if (!isStopped() &amp;&amp; isHealthy()) {<a name="line.981"></a>
+<span class="sourceLineNo">982</span>        // start the snapshot handler and other procedure handlers,<a name="line.982"></a>
+<span class="sourceLineNo">983</span>        // since the server is ready to run<a name="line.983"></a>
+<span class="sourceLineNo">984</span>        if (this.rspmHost != null) {<a name="line.984"></a>
+<span class="sourceLineNo">985</span>          this.rspmHost.start();<a name="line.985"></a>
+<span class="sourceLineNo">986</span>        }<a name="line.986"></a>
+<span class="sourceLineNo">987</span>        // Start the Quota Manager<a name="line.987"></a>
+<span class="sourceLineNo">988</span>        if (this.rsQuotaManager != null) {<a name="line.988"></a>
+<span class="sourceLineNo">989</span>          rsQuotaManager.start(getRpcServer().getScheduler());<a name="line.989"></a>
+<span class="sourceLineNo">990</span>        }<a name="line.990"></a>
+<span class="sourceLineNo">991</span>        if (this.rsSpaceQuotaManager != null) {<a name="line.991"></a>
+<span class="sourceLineNo">992</span>          this.rsSpaceQuotaManager.start();<a name="line.992"></a>
+<span class="sourceLineNo">993</span>        }<a name="line.993"></a>
+<span class="sourceLineNo">994</span>      }<a name="line.994"></a>
+<span class="sourceLineNo">995</span><a name="line.995"></a>
+<span class="sourceLineNo">996</span>      // We registered with the Master.  Go into run mode.<a name="line.996"></a>
+<span class="sourceLineNo">997</span>      long lastMsg = System.currentTimeMillis();<a name="line.997"></a>
+<span class="sourceLineNo">998</span>      long oldRequestCount = -1;<a name="line.998"></a>
+<span class="sourceLineNo">999</span>      // The main run loop.<a name="line.999"></a>
+<span class="sourceLineNo">1000</span>      while (!isStopped() &amp;&amp; isHealthy()) {<a name="line.1000"></a>
+<span class="sourceLineNo">1001</span>        if (!isClusterUp()) {<a name="line.1001"></a>
+<span class="sourceLineNo">1002</span>          if (isOnlineRegionsEmpty()) {<a name="line.1002"></a>
+<span class="sourceLineNo">1003</span>            stop("Exiting; cluster shutdown set and not carrying any regions");<a name="line.1003"></a>
+<span class="sourceLineNo">1004</span>          } else if (!this.stopping) {<a name="line.1004"></a>
+<span class="sourceLineNo">1005</span>            this.stopping = true;<a name="line.1005"></a>
+<span class="sourceLineNo">1006</span>            LOG.info("Closing user regions");<a name="line.1006"></a>
+<span class="sourceLineNo">1007</span>            closeUserRegions(this.abortRequested);<a name="line.1007"></a>
+<span class="sourceLineNo">1008</span>          } else if (this.stopping) {<a name="line.1008"></a>
+<span class="sourceLineNo">1009</span>            boolean allUserRegionsOffline = areAllUserRegionsOffline();<a name="line.1009"></a>
+<span class="sourceLineNo">1010</span>            if (allUserRegionsOffline) {<a name="line.1010"></a>
+<span class="sourceLineNo">1011</span>              // Set stopped if no more write requests tp meta tables<a name="line.1011"></a>
+<span class="sourceLineNo">1012</span>              // since last time we went around the loop.  Any open<a name="line.1012"></a>
+<span class="sourceLineNo">1013</span>              // meta regions will be closed on our way out.<a name="line.1013"></a>
+<span class="sourceLineNo">1014</span>              if (oldRequestCount == getWriteRequestCount()) {<a name="line.1014"></a>
+<span class="sourceLineNo">1015</span>                stop("Stopped; only catalog regions remaining online");<a name="line.1015"></a>
+<span class="sourceLineNo">1016</span>                break;<a name="line.1016"></a>
+<span class="sourceLineNo">1017</span>              }<a name="line.1017"></a>
+<span class="sourceLineNo">1018</span>              oldRequestCount = getWriteRequestCount();<a name="line.1018"></a>
+<span class="sourceLineNo">1019</span>            } else {<a name="line.1019"></a>
+<span class="sourceLineNo">1020</span>              // Make sure all regions have been closed -- some regions may<a name="line.1020"></a>
+<span class="sourceLineNo">1021</span>              // have not got it because we were splitting at the time of<a name="line.1021"></a>
+<span class="sourceLineNo">1022</span>              // the call to closeUserRegions.<a name="line.1022"></a>
+<span class="sourceLineNo">1023</span>              closeUserRegions(this.abortRequested);<a name="line.1023"></a>
+<span class="sourceLineNo">1024</span>            }<a name="line.1024"></a>
+<span class="sourceLineNo">1025</span>            LOG.debug("Waiting on " + getOnlineRegionsAsPrintableString());<a name="line.1025"></a>
+<span class="sourceLineNo">1026</span>          }<a name="line.1026"></a>
 <span class="sourceLineNo">1027</span>        }<a name="line.1027"></a>
-<span class="sourceLineNo">1028</span>        if (!isStopped() &amp;&amp; !isAborted()) {<a name="line.1028"></a>
-<span class="sourceLineNo">1029</span>          this.sleeper.sleep();<a name="line.1029"></a>
-<span class="sourceLineNo">1030</span>        }<a name="line.1030"></a>
-<span class="sourceLineNo">1031</span>      } // for<a name="line.1031"></a>
-<span class="sourceLineNo">1032</span>    } catch (Throwable t) {<a name="line.1032"></a>
-<span class="sourceLineNo">1033</span>      if (!rpcServices.checkOOME(t)) {<a name="line.1033"></a>
-<span class="sourceLineNo">1034</span>        String prefix = t instanceof YouAreDeadException? "": "Unhandled: ";<a name="line.1034"></a>
-<span class="sourceLineNo">1035</span>        abort(prefix + t.getMessage(), t);<a name="line.1035"></a>
-<span class="sourceLineNo">1036</span>      }<a name="line.1036"></a>
-<span class="sourceLineNo">1037</span>    }<a name="line.1037"></a>
-<span class="sourceLineNo">1038</span><a name="line.1038"></a>
-<span class="sourceLineNo">1039</span>    if (abortRequested) {<a name="line.1039"></a>
-<span class="sourceLineNo">1040</span>      Timer abortMonitor = new Timer("Abort regionserver monitor", true);<a name="line.1040"></a>
-<span class="sourceLineNo">1041</span>      TimerTask abortTimeoutTask = null;<a name="line.1041"></a>
-<span class="sourceLineNo">1042</span>      try {<a name="line.1042"></a>
-<span class="sourceLineNo">1043</span>        abortTimeoutTask =<a name="line.1043"></a>
-<span class="sourceLineNo">1044</span>            Class.forName(conf.get(ABORT_TIMEOUT_TASK, SystemExitWhenAbortTimeout.class.getName()))<a name="line.1044"></a>
-<span class="sourceLineNo">1045</span>                .asSubclass(TimerTask.class).getDeclaredConstructor().newInstance();<a name="line.1045"></a>
-<span class="sourceLineNo">1046</span>      } catch (Exception e) {<a name="line.1046"></a>
-<span class="sourceLineNo">1047</span>        LOG.warn("Initialize abort timeout task failed", e);<a name="line.1047"></a>
-<span class="sourceLineNo">1048</span>      }<a name="line.1048"></a>
-<span class="sourceLineNo">1049</span>  

<TRUNCATED>

[14/26] hbase-site git commit: Published site at 6f15cecaed2f1f76bfe1880b7c578ed369daa5d5.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html
index 6369c27..ea05301 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html
@@ -603,3251 +603,3256 @@
 <span class="sourceLineNo">595</span>      // init superusers and add the server principal (if using security)<a name="line.595"></a>
 <span class="sourceLineNo">596</span>      // or process owner as default super user.<a name="line.596"></a>
 <span class="sourceLineNo">597</span>      Superusers.initialize(conf);<a name="line.597"></a>
-<span class="sourceLineNo">598</span><a name="line.598"></a>
-<span class="sourceLineNo">599</span>      regionServerAccounting = new RegionServerAccounting(conf);<a name="line.599"></a>
+<span class="sourceLineNo">598</span>      regionServerAccounting = new RegionServerAccounting(conf);<a name="line.598"></a>
+<span class="sourceLineNo">599</span><a name="line.599"></a>
 <span class="sourceLineNo">600</span>      boolean isMasterNotCarryTable =<a name="line.600"></a>
 <span class="sourceLineNo">601</span>          this instanceof HMaster &amp;&amp; !LoadBalancer.isTablesOnMaster(conf);<a name="line.601"></a>
-<span class="sourceLineNo">602</span>      cacheConfig = new CacheConfig(conf, !isMasterNotCarryTable);<a name="line.602"></a>
-<span class="sourceLineNo">603</span>      mobCacheConfig = new MobCacheConfig(conf, !isMasterNotCarryTable);<a name="line.603"></a>
-<span class="sourceLineNo">604</span>      uncaughtExceptionHandler = new UncaughtExceptionHandler() {<a name="line.604"></a>
-<span class="sourceLineNo">605</span>        @Override<a name="line.605"></a>
-<span class="sourceLineNo">606</span>        public void uncaughtException(Thread t, Throwable e) {<a name="line.606"></a>
-<span class="sourceLineNo">607</span>          abort("Uncaught exception in executorService thread " + t.getName(), e);<a name="line.607"></a>
-<span class="sourceLineNo">608</span>        }<a name="line.608"></a>
-<span class="sourceLineNo">609</span>      };<a name="line.609"></a>
-<span class="sourceLineNo">610</span><a name="line.610"></a>
-<span class="sourceLineNo">611</span>      initializeFileSystem();<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      spanReceiverHost = SpanReceiverHost.getInstance(getConfiguration());<a name="line.612"></a>
-<span class="sourceLineNo">613</span><a name="line.613"></a>
-<span class="sourceLineNo">614</span>      this.configurationManager = new ConfigurationManager();<a name="line.614"></a>
-<span class="sourceLineNo">615</span>      setupWindows(getConfiguration(), getConfigurationManager());<a name="line.615"></a>
-<span class="sourceLineNo">616</span><a name="line.616"></a>
-<span class="sourceLineNo">617</span>      // Some unit tests don't need a cluster, so no zookeeper at all<a name="line.617"></a>
-<span class="sourceLineNo">618</span>      if (!conf.getBoolean("hbase.testing.nocluster", false)) {<a name="line.618"></a>
-<span class="sourceLineNo">619</span>        // Open connection to zookeeper and set primary watcher<a name="line.619"></a>
-<span class="sourceLineNo">620</span>        zooKeeper = new ZKWatcher(conf, getProcessName() + ":" +<a name="line.620"></a>
-<span class="sourceLineNo">621</span>          rpcServices.isa.getPort(), this, canCreateBaseZNode());<a name="line.621"></a>
-<span class="sourceLineNo">622</span>        // If no master in cluster, skip trying to track one or look for a cluster status.<a name="line.622"></a>
-<span class="sourceLineNo">623</span>        if (!this.masterless) {<a name="line.623"></a>
-<span class="sourceLineNo">624</span>          this.csm = new ZkCoordinatedStateManager(this);<a name="line.624"></a>
-<span class="sourceLineNo">625</span><a name="line.625"></a>
-<span class="sourceLineNo">626</span>          masterAddressTracker = new MasterAddressTracker(getZooKeeper(), this);<a name="line.626"></a>
-<span class="sourceLineNo">627</span>          masterAddressTracker.start();<a name="line.627"></a>
-<span class="sourceLineNo">628</span><a name="line.628"></a>
-<span class="sourceLineNo">629</span>          clusterStatusTracker = new ClusterStatusTracker(zooKeeper, this);<a name="line.629"></a>
-<span class="sourceLineNo">630</span>          clusterStatusTracker.start();<a name="line.630"></a>
-<span class="sourceLineNo">631</span>        } else {<a name="line.631"></a>
-<span class="sourceLineNo">632</span>          masterAddressTracker = null;<a name="line.632"></a>
-<span class="sourceLineNo">633</span>          clusterStatusTracker = null;<a name="line.633"></a>
-<span class="sourceLineNo">634</span>        }<a name="line.634"></a>
-<span class="sourceLineNo">635</span>      } else {<a name="line.635"></a>
-<span class="sourceLineNo">636</span>        zooKeeper = null;<a name="line.636"></a>
-<span class="sourceLineNo">637</span>        masterAddressTracker = null;<a name="line.637"></a>
-<span class="sourceLineNo">638</span>        clusterStatusTracker = null;<a name="line.638"></a>
-<span class="sourceLineNo">639</span>      }<a name="line.639"></a>
-<span class="sourceLineNo">640</span>      this.rpcServices.start(zooKeeper);<a name="line.640"></a>
-<span class="sourceLineNo">641</span>      // This violates 'no starting stuff in Constructor' but Master depends on the below chore<a name="line.641"></a>
-<span class="sourceLineNo">642</span>      // and executor being created and takes a different startup route. Lots of overlap between HRS<a name="line.642"></a>
-<span class="sourceLineNo">643</span>      // and M (An M IS A HRS now). Need to refactor so less duplication between M and its super<a name="line.643"></a>
-<span class="sourceLineNo">644</span>      // Master expects Constructor to put up web servers. Ugh.<a name="line.644"></a>
-<span class="sourceLineNo">645</span>      // class HRS. TODO.<a name="line.645"></a>
-<span class="sourceLineNo">646</span>      this.choreService = new ChoreService(getName(), true);<a name="line.646"></a>
-<span class="sourceLineNo">647</span>      this.executorService = new ExecutorService(getName());<a name="line.647"></a>
-<span class="sourceLineNo">648</span>      putUpWebUI();<a name="line.648"></a>
-<span class="sourceLineNo">649</span>    } catch (Throwable t) {<a name="line.649"></a>
-<span class="sourceLineNo">650</span>      // Make sure we log the exception. HRegionServer is often started via reflection and the<a name="line.650"></a>
-<span class="sourceLineNo">651</span>      // cause of failed startup is lost.<a name="line.651"></a>
-<span class="sourceLineNo">652</span>      LOG.error("Failed construction RegionServer", t);<a name="line.652"></a>
-<span class="sourceLineNo">653</span>      throw t;<a name="line.653"></a>
-<span class="sourceLineNo">654</span>    }<a name="line.654"></a>
-<span class="sourceLineNo">655</span>  }<a name="line.655"></a>
-<span class="sourceLineNo">656</span><a name="line.656"></a>
-<span class="sourceLineNo">657</span>  // HMaster should override this method to load the specific config for master<a name="line.657"></a>
-<span class="sourceLineNo">658</span>  protected String getUseThisHostnameInstead(Configuration conf) throws IOException {<a name="line.658"></a>
-<span class="sourceLineNo">659</span>    String hostname = conf.get(RS_HOSTNAME_KEY);<a name="line.659"></a>
-<span class="sourceLineNo">660</span>    if (conf.getBoolean(RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY, false)) {<a name="line.660"></a>
-<span class="sourceLineNo">661</span>      if (!StringUtils.isBlank(hostname)) {<a name="line.661"></a>
-<span class="sourceLineNo">662</span>        String msg = RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY + " and " + RS_HOSTNAME_KEY +<a name="line.662"></a>
-<span class="sourceLineNo">663</span>          " are mutually exclusive. Do not set " + RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY +<a name="line.663"></a>
-<span class="sourceLineNo">664</span>          " to true while " + RS_HOSTNAME_KEY + " is used";<a name="line.664"></a>
-<span class="sourceLineNo">665</span>        throw new IOException(msg);<a name="line.665"></a>
-<span class="sourceLineNo">666</span>      } else {<a name="line.666"></a>
-<span class="sourceLineNo">667</span>        return rpcServices.isa.getHostName();<a name="line.667"></a>
-<span class="sourceLineNo">668</span>      }<a name="line.668"></a>
-<span class="sourceLineNo">669</span>    } else {<a name="line.669"></a>
-<span class="sourceLineNo">670</span>      return hostname;<a name="line.670"></a>
-<span class="sourceLineNo">671</span>    }<a name="line.671"></a>
-<span class="sourceLineNo">672</span>  }<a name="line.672"></a>
-<span class="sourceLineNo">673</span><a name="line.673"></a>
-<span class="sourceLineNo">674</span>  /**<a name="line.674"></a>
-<span class="sourceLineNo">675</span>   * If running on Windows, do windows-specific setup.<a name="line.675"></a>
-<span class="sourceLineNo">676</span>   */<a name="line.676"></a>
-<span class="sourceLineNo">677</span>  private static void setupWindows(final Configuration conf, ConfigurationManager cm) {<a name="line.677"></a>
-<span class="sourceLineNo">678</span>    if (!SystemUtils.IS_OS_WINDOWS) {<a name="line.678"></a>
-<span class="sourceLineNo">679</span>      Signal.handle(new Signal("HUP"), new SignalHandler() {<a name="line.679"></a>
-<span class="sourceLineNo">680</span>        @Override<a name="line.680"></a>
-<span class="sourceLineNo">681</span>        public void handle(Signal signal) {<a name="line.681"></a>
-<span class="sourceLineNo">682</span>          conf.reloadConfiguration();<a name="line.682"></a>
-<span class="sourceLineNo">683</span>          cm.notifyAllObservers(conf);<a name="line.683"></a>
-<span class="sourceLineNo">684</span>        }<a name="line.684"></a>
-<span class="sourceLineNo">685</span>      });<a name="line.685"></a>
-<span class="sourceLineNo">686</span>    }<a name="line.686"></a>
-<span class="sourceLineNo">687</span>  }<a name="line.687"></a>
-<span class="sourceLineNo">688</span><a name="line.688"></a>
-<span class="sourceLineNo">689</span>  private static NettyEventLoopGroupConfig setupNetty(Configuration conf) {<a name="line.689"></a>
-<span class="sourceLineNo">690</span>    // Initialize netty event loop group at start as we may use it for rpc server, rpc client &amp; WAL.<a name="line.690"></a>
-<span class="sourceLineNo">691</span>    NettyEventLoopGroupConfig nelgc =<a name="line.691"></a>
-<span class="sourceLineNo">692</span>      new NettyEventLoopGroupConfig(conf, "RS-EventLoopGroup");<a name="line.692"></a>
-<span class="sourceLineNo">693</span>    NettyRpcClientConfigHelper.setEventLoopConfig(conf, nelgc.group(), nelgc.clientChannelClass());<a name="line.693"></a>
-<span class="sourceLineNo">694</span>    NettyAsyncFSWALConfigHelper.setEventLoopConfig(conf, nelgc.group(), nelgc.clientChannelClass());<a name="line.694"></a>
-<span class="sourceLineNo">695</span>    return nelgc;<a name="line.695"></a>
-<span class="sourceLineNo">696</span>  }<a name="line.696"></a>
-<span class="sourceLineNo">697</span><a name="line.697"></a>
-<span class="sourceLineNo">698</span>  private void initializeFileSystem() throws IOException {<a name="line.698"></a>
-<span class="sourceLineNo">699</span>    // Get fs instance used by this RS.  Do we use checksum verification in the hbase? If hbase<a name="line.699"></a>
-<span class="sourceLineNo">700</span>    // checksum verification enabled, then automatically switch off hdfs checksum verification.<a name="line.700"></a>
-<span class="sourceLineNo">701</span>    boolean useHBaseChecksum = conf.getBoolean(HConstants.HBASE_CHECKSUM_VERIFICATION, true);<a name="line.701"></a>
-<span class="sourceLineNo">702</span>    FSUtils.setFsDefault(this.conf, FSUtils.getWALRootDir(this.conf));<a name="line.702"></a>
-<span class="sourceLineNo">703</span>    this.walFs = new HFileSystem(this.conf, useHBaseChecksum);<a name="line.703"></a>
-<span class="sourceLineNo">704</span>    this.walRootDir = FSUtils.getWALRootDir(this.conf);<a name="line.704"></a>
-<span class="sourceLineNo">705</span>    // Set 'fs.defaultFS' to match the filesystem on hbase.rootdir else<a name="line.705"></a>
-<span class="sourceLineNo">706</span>    // underlying hadoop hdfs accessors will be going against wrong filesystem<a name="line.706"></a>
-<span class="sourceLineNo">707</span>    // (unless all is set to defaults).<a name="line.707"></a>
-<span class="sourceLineNo">708</span>    FSUtils.setFsDefault(this.conf, FSUtils.getRootDir(this.conf));<a name="line.708"></a>
-<span class="sourceLineNo">709</span>    this.fs = new HFileSystem(this.conf, useHBaseChecksum);<a name="line.709"></a>
-<span class="sourceLineNo">710</span>    this.rootDir = FSUtils.getRootDir(this.conf);<a name="line.710"></a>
-<span class="sourceLineNo">711</span>    this.tableDescriptors = getFsTableDescriptors();<a name="line.711"></a>
-<span class="sourceLineNo">712</span>  }<a name="line.712"></a>
-<span class="sourceLineNo">713</span><a name="line.713"></a>
-<span class="sourceLineNo">714</span>  protected TableDescriptors getFsTableDescriptors() throws IOException {<a name="line.714"></a>
-<span class="sourceLineNo">715</span>    return new FSTableDescriptors(this.conf,<a name="line.715"></a>
-<span class="sourceLineNo">716</span>      this.fs, this.rootDir, !canUpdateTableDescriptor(), false, getMetaTableObserver());<a name="line.716"></a>
+<span class="sourceLineNo">602</span>      // no need to instantiate global block cache when master not carry table<a name="line.602"></a>
+<span class="sourceLineNo">603</span>      if (!isMasterNotCarryTable) {<a name="line.603"></a>
+<span class="sourceLineNo">604</span>        CacheConfig.instantiateBlockCache(conf);<a name="line.604"></a>
+<span class="sourceLineNo">605</span>      }<a name="line.605"></a>
+<span class="sourceLineNo">606</span>      cacheConfig = new CacheConfig(conf);<a name="line.606"></a>
+<span class="sourceLineNo">607</span>      mobCacheConfig = new MobCacheConfig(conf);<a name="line.607"></a>
+<span class="sourceLineNo">608</span><a name="line.608"></a>
+<span class="sourceLineNo">609</span>      uncaughtExceptionHandler = new UncaughtExceptionHandler() {<a name="line.609"></a>
+<span class="sourceLineNo">610</span>        @Override<a name="line.610"></a>
+<span class="sourceLineNo">611</span>        public void uncaughtException(Thread t, Throwable e) {<a name="line.611"></a>
+<span class="sourceLineNo">612</span>          abort("Uncaught exception in executorService thread " + t.getName(), e);<a name="line.612"></a>
+<span class="sourceLineNo">613</span>        }<a name="line.613"></a>
+<span class="sourceLineNo">614</span>      };<a name="line.614"></a>
+<span class="sourceLineNo">615</span><a name="line.615"></a>
+<span class="sourceLineNo">616</span>      initializeFileSystem();<a name="line.616"></a>
+<span class="sourceLineNo">617</span>      spanReceiverHost = SpanReceiverHost.getInstance(getConfiguration());<a name="line.617"></a>
+<span class="sourceLineNo">618</span><a name="line.618"></a>
+<span class="sourceLineNo">619</span>      this.configurationManager = new ConfigurationManager();<a name="line.619"></a>
+<span class="sourceLineNo">620</span>      setupWindows(getConfiguration(), getConfigurationManager());<a name="line.620"></a>
+<span class="sourceLineNo">621</span><a name="line.621"></a>
+<span class="sourceLineNo">622</span>      // Some unit tests don't need a cluster, so no zookeeper at all<a name="line.622"></a>
+<span class="sourceLineNo">623</span>      if (!conf.getBoolean("hbase.testing.nocluster", false)) {<a name="line.623"></a>
+<span class="sourceLineNo">624</span>        // Open connection to zookeeper and set primary watcher<a name="line.624"></a>
+<span class="sourceLineNo">625</span>        zooKeeper = new ZKWatcher(conf, getProcessName() + ":" +<a name="line.625"></a>
+<span class="sourceLineNo">626</span>          rpcServices.isa.getPort(), this, canCreateBaseZNode());<a name="line.626"></a>
+<span class="sourceLineNo">627</span>        // If no master in cluster, skip trying to track one or look for a cluster status.<a name="line.627"></a>
+<span class="sourceLineNo">628</span>        if (!this.masterless) {<a name="line.628"></a>
+<span class="sourceLineNo">629</span>          this.csm = new ZkCoordinatedStateManager(this);<a name="line.629"></a>
+<span class="sourceLineNo">630</span><a name="line.630"></a>
+<span class="sourceLineNo">631</span>          masterAddressTracker = new MasterAddressTracker(getZooKeeper(), this);<a name="line.631"></a>
+<span class="sourceLineNo">632</span>          masterAddressTracker.start();<a name="line.632"></a>
+<span class="sourceLineNo">633</span><a name="line.633"></a>
+<span class="sourceLineNo">634</span>          clusterStatusTracker = new ClusterStatusTracker(zooKeeper, this);<a name="line.634"></a>
+<span class="sourceLineNo">635</span>          clusterStatusTracker.start();<a name="line.635"></a>
+<span class="sourceLineNo">636</span>        } else {<a name="line.636"></a>
+<span class="sourceLineNo">637</span>          masterAddressTracker = null;<a name="line.637"></a>
+<span class="sourceLineNo">638</span>          clusterStatusTracker = null;<a name="line.638"></a>
+<span class="sourceLineNo">639</span>        }<a name="line.639"></a>
+<span class="sourceLineNo">640</span>      } else {<a name="line.640"></a>
+<span class="sourceLineNo">641</span>        zooKeeper = null;<a name="line.641"></a>
+<span class="sourceLineNo">642</span>        masterAddressTracker = null;<a name="line.642"></a>
+<span class="sourceLineNo">643</span>        clusterStatusTracker = null;<a name="line.643"></a>
+<span class="sourceLineNo">644</span>      }<a name="line.644"></a>
+<span class="sourceLineNo">645</span>      this.rpcServices.start(zooKeeper);<a name="line.645"></a>
+<span class="sourceLineNo">646</span>      // This violates 'no starting stuff in Constructor' but Master depends on the below chore<a name="line.646"></a>
+<span class="sourceLineNo">647</span>      // and executor being created and takes a different startup route. Lots of overlap between HRS<a name="line.647"></a>
+<span class="sourceLineNo">648</span>      // and M (An M IS A HRS now). Need to refactor so less duplication between M and its super<a name="line.648"></a>
+<span class="sourceLineNo">649</span>      // Master expects Constructor to put up web servers. Ugh.<a name="line.649"></a>
+<span class="sourceLineNo">650</span>      // class HRS. TODO.<a name="line.650"></a>
+<span class="sourceLineNo">651</span>      this.choreService = new ChoreService(getName(), true);<a name="line.651"></a>
+<span class="sourceLineNo">652</span>      this.executorService = new ExecutorService(getName());<a name="line.652"></a>
+<span class="sourceLineNo">653</span>      putUpWebUI();<a name="line.653"></a>
+<span class="sourceLineNo">654</span>    } catch (Throwable t) {<a name="line.654"></a>
+<span class="sourceLineNo">655</span>      // Make sure we log the exception. HRegionServer is often started via reflection and the<a name="line.655"></a>
+<span class="sourceLineNo">656</span>      // cause of failed startup is lost.<a name="line.656"></a>
+<span class="sourceLineNo">657</span>      LOG.error("Failed construction RegionServer", t);<a name="line.657"></a>
+<span class="sourceLineNo">658</span>      throw t;<a name="line.658"></a>
+<span class="sourceLineNo">659</span>    }<a name="line.659"></a>
+<span class="sourceLineNo">660</span>  }<a name="line.660"></a>
+<span class="sourceLineNo">661</span><a name="line.661"></a>
+<span class="sourceLineNo">662</span>  // HMaster should override this method to load the specific config for master<a name="line.662"></a>
+<span class="sourceLineNo">663</span>  protected String getUseThisHostnameInstead(Configuration conf) throws IOException {<a name="line.663"></a>
+<span class="sourceLineNo">664</span>    String hostname = conf.get(RS_HOSTNAME_KEY);<a name="line.664"></a>
+<span class="sourceLineNo">665</span>    if (conf.getBoolean(RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY, false)) {<a name="line.665"></a>
+<span class="sourceLineNo">666</span>      if (!StringUtils.isBlank(hostname)) {<a name="line.666"></a>
+<span class="sourceLineNo">667</span>        String msg = RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY + " and " + RS_HOSTNAME_KEY +<a name="line.667"></a>
+<span class="sourceLineNo">668</span>          " are mutually exclusive. Do not set " + RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY +<a name="line.668"></a>
+<span class="sourceLineNo">669</span>          " to true while " + RS_HOSTNAME_KEY + " is used";<a name="line.669"></a>
+<span class="sourceLineNo">670</span>        throw new IOException(msg);<a name="line.670"></a>
+<span class="sourceLineNo">671</span>      } else {<a name="line.671"></a>
+<span class="sourceLineNo">672</span>        return rpcServices.isa.getHostName();<a name="line.672"></a>
+<span class="sourceLineNo">673</span>      }<a name="line.673"></a>
+<span class="sourceLineNo">674</span>    } else {<a name="line.674"></a>
+<span class="sourceLineNo">675</span>      return hostname;<a name="line.675"></a>
+<span class="sourceLineNo">676</span>    }<a name="line.676"></a>
+<span class="sourceLineNo">677</span>  }<a name="line.677"></a>
+<span class="sourceLineNo">678</span><a name="line.678"></a>
+<span class="sourceLineNo">679</span>  /**<a name="line.679"></a>
+<span class="sourceLineNo">680</span>   * If running on Windows, do windows-specific setup.<a name="line.680"></a>
+<span class="sourceLineNo">681</span>   */<a name="line.681"></a>
+<span class="sourceLineNo">682</span>  private static void setupWindows(final Configuration conf, ConfigurationManager cm) {<a name="line.682"></a>
+<span class="sourceLineNo">683</span>    if (!SystemUtils.IS_OS_WINDOWS) {<a name="line.683"></a>
+<span class="sourceLineNo">684</span>      Signal.handle(new Signal("HUP"), new SignalHandler() {<a name="line.684"></a>
+<span class="sourceLineNo">685</span>        @Override<a name="line.685"></a>
+<span class="sourceLineNo">686</span>        public void handle(Signal signal) {<a name="line.686"></a>
+<span class="sourceLineNo">687</span>          conf.reloadConfiguration();<a name="line.687"></a>
+<span class="sourceLineNo">688</span>          cm.notifyAllObservers(conf);<a name="line.688"></a>
+<span class="sourceLineNo">689</span>        }<a name="line.689"></a>
+<span class="sourceLineNo">690</span>      });<a name="line.690"></a>
+<span class="sourceLineNo">691</span>    }<a name="line.691"></a>
+<span class="sourceLineNo">692</span>  }<a name="line.692"></a>
+<span class="sourceLineNo">693</span><a name="line.693"></a>
+<span class="sourceLineNo">694</span>  private static NettyEventLoopGroupConfig setupNetty(Configuration conf) {<a name="line.694"></a>
+<span class="sourceLineNo">695</span>    // Initialize netty event loop group at start as we may use it for rpc server, rpc client &amp; WAL.<a name="line.695"></a>
+<span class="sourceLineNo">696</span>    NettyEventLoopGroupConfig nelgc =<a name="line.696"></a>
+<span class="sourceLineNo">697</span>      new NettyEventLoopGroupConfig(conf, "RS-EventLoopGroup");<a name="line.697"></a>
+<span class="sourceLineNo">698</span>    NettyRpcClientConfigHelper.setEventLoopConfig(conf, nelgc.group(), nelgc.clientChannelClass());<a name="line.698"></a>
+<span class="sourceLineNo">699</span>    NettyAsyncFSWALConfigHelper.setEventLoopConfig(conf, nelgc.group(), nelgc.clientChannelClass());<a name="line.699"></a>
+<span class="sourceLineNo">700</span>    return nelgc;<a name="line.700"></a>
+<span class="sourceLineNo">701</span>  }<a name="line.701"></a>
+<span class="sourceLineNo">702</span><a name="line.702"></a>
+<span class="sourceLineNo">703</span>  private void initializeFileSystem() throws IOException {<a name="line.703"></a>
+<span class="sourceLineNo">704</span>    // Get fs instance used by this RS.  Do we use checksum verification in the hbase? If hbase<a name="line.704"></a>
+<span class="sourceLineNo">705</span>    // checksum verification enabled, then automatically switch off hdfs checksum verification.<a name="line.705"></a>
+<span class="sourceLineNo">706</span>    boolean useHBaseChecksum = conf.getBoolean(HConstants.HBASE_CHECKSUM_VERIFICATION, true);<a name="line.706"></a>
+<span class="sourceLineNo">707</span>    FSUtils.setFsDefault(this.conf, FSUtils.getWALRootDir(this.conf));<a name="line.707"></a>
+<span class="sourceLineNo">708</span>    this.walFs = new HFileSystem(this.conf, useHBaseChecksum);<a name="line.708"></a>
+<span class="sourceLineNo">709</span>    this.walRootDir = FSUtils.getWALRootDir(this.conf);<a name="line.709"></a>
+<span class="sourceLineNo">710</span>    // Set 'fs.defaultFS' to match the filesystem on hbase.rootdir else<a name="line.710"></a>
+<span class="sourceLineNo">711</span>    // underlying hadoop hdfs accessors will be going against wrong filesystem<a name="line.711"></a>
+<span class="sourceLineNo">712</span>    // (unless all is set to defaults).<a name="line.712"></a>
+<span class="sourceLineNo">713</span>    FSUtils.setFsDefault(this.conf, FSUtils.getRootDir(this.conf));<a name="line.713"></a>
+<span class="sourceLineNo">714</span>    this.fs = new HFileSystem(this.conf, useHBaseChecksum);<a name="line.714"></a>
+<span class="sourceLineNo">715</span>    this.rootDir = FSUtils.getRootDir(this.conf);<a name="line.715"></a>
+<span class="sourceLineNo">716</span>    this.tableDescriptors = getFsTableDescriptors();<a name="line.716"></a>
 <span class="sourceLineNo">717</span>  }<a name="line.717"></a>
 <span class="sourceLineNo">718</span><a name="line.718"></a>
-<span class="sourceLineNo">719</span>  protected Function&lt;TableDescriptorBuilder, TableDescriptorBuilder&gt; getMetaTableObserver() {<a name="line.719"></a>
-<span class="sourceLineNo">720</span>    return null;<a name="line.720"></a>
-<span class="sourceLineNo">721</span>  }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>  protected void login(UserProvider user, String host) throws IOException {<a name="line.723"></a>
-<span class="sourceLineNo">724</span>    user.login("hbase.regionserver.keytab.file",<a name="line.724"></a>
-<span class="sourceLineNo">725</span>      "hbase.regionserver.kerberos.principal", host);<a name="line.725"></a>
+<span class="sourceLineNo">719</span>  protected TableDescriptors getFsTableDescriptors() throws IOException {<a name="line.719"></a>
+<span class="sourceLineNo">720</span>    return new FSTableDescriptors(this.conf,<a name="line.720"></a>
+<span class="sourceLineNo">721</span>      this.fs, this.rootDir, !canUpdateTableDescriptor(), false, getMetaTableObserver());<a name="line.721"></a>
+<span class="sourceLineNo">722</span>  }<a name="line.722"></a>
+<span class="sourceLineNo">723</span><a name="line.723"></a>
+<span class="sourceLineNo">724</span>  protected Function&lt;TableDescriptorBuilder, TableDescriptorBuilder&gt; getMetaTableObserver() {<a name="line.724"></a>
+<span class="sourceLineNo">725</span>    return null;<a name="line.725"></a>
 <span class="sourceLineNo">726</span>  }<a name="line.726"></a>
 <span class="sourceLineNo">727</span><a name="line.727"></a>
-<span class="sourceLineNo">728</span><a name="line.728"></a>
-<span class="sourceLineNo">729</span>  /**<a name="line.729"></a>
-<span class="sourceLineNo">730</span>   * Wait for an active Master.<a name="line.730"></a>
-<span class="sourceLineNo">731</span>   * See override in Master superclass for how it is used.<a name="line.731"></a>
-<span class="sourceLineNo">732</span>   */<a name="line.732"></a>
-<span class="sourceLineNo">733</span>  protected void waitForMasterActive() {}<a name="line.733"></a>
-<span class="sourceLineNo">734</span><a name="line.734"></a>
-<span class="sourceLineNo">735</span>  protected String getProcessName() {<a name="line.735"></a>
-<span class="sourceLineNo">736</span>    return REGIONSERVER;<a name="line.736"></a>
-<span class="sourceLineNo">737</span>  }<a name="line.737"></a>
-<span class="sourceLineNo">738</span><a name="line.738"></a>
-<span class="sourceLineNo">739</span>  protected boolean canCreateBaseZNode() {<a name="line.739"></a>
-<span class="sourceLineNo">740</span>    return this.masterless;<a name="line.740"></a>
-<span class="sourceLineNo">741</span>  }<a name="line.741"></a>
-<span class="sourceLineNo">742</span><a name="line.742"></a>
-<span class="sourceLineNo">743</span>  protected boolean canUpdateTableDescriptor() {<a name="line.743"></a>
-<span class="sourceLineNo">744</span>    return false;<a name="line.744"></a>
-<span class="sourceLineNo">745</span>  }<a name="line.745"></a>
-<span class="sourceLineNo">746</span><a name="line.746"></a>
-<span class="sourceLineNo">747</span>  protected RSRpcServices createRpcServices() throws IOException {<a name="line.747"></a>
-<span class="sourceLineNo">748</span>    return new RSRpcServices(this);<a name="line.748"></a>
-<span class="sourceLineNo">749</span>  }<a name="line.749"></a>
-<span class="sourceLineNo">750</span><a name="line.750"></a>
-<span class="sourceLineNo">751</span>  protected void configureInfoServer() {<a name="line.751"></a>
-<span class="sourceLineNo">752</span>    infoServer.addServlet("rs-status", "/rs-status", RSStatusServlet.class);<a name="line.752"></a>
-<span class="sourceLineNo">753</span>    infoServer.setAttribute(REGIONSERVER, this);<a name="line.753"></a>
+<span class="sourceLineNo">728</span>  protected void login(UserProvider user, String host) throws IOException {<a name="line.728"></a>
+<span class="sourceLineNo">729</span>    user.login("hbase.regionserver.keytab.file",<a name="line.729"></a>
+<span class="sourceLineNo">730</span>      "hbase.regionserver.kerberos.principal", host);<a name="line.730"></a>
+<span class="sourceLineNo">731</span>  }<a name="line.731"></a>
+<span class="sourceLineNo">732</span><a name="line.732"></a>
+<span class="sourceLineNo">733</span><a name="line.733"></a>
+<span class="sourceLineNo">734</span>  /**<a name="line.734"></a>
+<span class="sourceLineNo">735</span>   * Wait for an active Master.<a name="line.735"></a>
+<span class="sourceLineNo">736</span>   * See override in Master superclass for how it is used.<a name="line.736"></a>
+<span class="sourceLineNo">737</span>   */<a name="line.737"></a>
+<span class="sourceLineNo">738</span>  protected void waitForMasterActive() {}<a name="line.738"></a>
+<span class="sourceLineNo">739</span><a name="line.739"></a>
+<span class="sourceLineNo">740</span>  protected String getProcessName() {<a name="line.740"></a>
+<span class="sourceLineNo">741</span>    return REGIONSERVER;<a name="line.741"></a>
+<span class="sourceLineNo">742</span>  }<a name="line.742"></a>
+<span class="sourceLineNo">743</span><a name="line.743"></a>
+<span class="sourceLineNo">744</span>  protected boolean canCreateBaseZNode() {<a name="line.744"></a>
+<span class="sourceLineNo">745</span>    return this.masterless;<a name="line.745"></a>
+<span class="sourceLineNo">746</span>  }<a name="line.746"></a>
+<span class="sourceLineNo">747</span><a name="line.747"></a>
+<span class="sourceLineNo">748</span>  protected boolean canUpdateTableDescriptor() {<a name="line.748"></a>
+<span class="sourceLineNo">749</span>    return false;<a name="line.749"></a>
+<span class="sourceLineNo">750</span>  }<a name="line.750"></a>
+<span class="sourceLineNo">751</span><a name="line.751"></a>
+<span class="sourceLineNo">752</span>  protected RSRpcServices createRpcServices() throws IOException {<a name="line.752"></a>
+<span class="sourceLineNo">753</span>    return new RSRpcServices(this);<a name="line.753"></a>
 <span class="sourceLineNo">754</span>  }<a name="line.754"></a>
 <span class="sourceLineNo">755</span><a name="line.755"></a>
-<span class="sourceLineNo">756</span>  protected Class&lt;? extends HttpServlet&gt; getDumpServlet() {<a name="line.756"></a>
-<span class="sourceLineNo">757</span>    return RSDumpServlet.class;<a name="line.757"></a>
-<span class="sourceLineNo">758</span>  }<a name="line.758"></a>
-<span class="sourceLineNo">759</span><a name="line.759"></a>
-<span class="sourceLineNo">760</span>  @Override<a name="line.760"></a>
-<span class="sourceLineNo">761</span>  public boolean registerService(com.google.protobuf.Service instance) {<a name="line.761"></a>
-<span class="sourceLineNo">762</span>    /*<a name="line.762"></a>
-<span class="sourceLineNo">763</span>     * No stacking of instances is allowed for a single executorService name<a name="line.763"></a>
-<span class="sourceLineNo">764</span>     */<a name="line.764"></a>
-<span class="sourceLineNo">765</span>    com.google.protobuf.Descriptors.ServiceDescriptor serviceDesc =<a name="line.765"></a>
-<span class="sourceLineNo">766</span>        instance.getDescriptorForType();<a name="line.766"></a>
-<span class="sourceLineNo">767</span>    String serviceName = CoprocessorRpcUtils.getServiceName(serviceDesc);<a name="line.767"></a>
-<span class="sourceLineNo">768</span>    if (coprocessorServiceHandlers.containsKey(serviceName)) {<a name="line.768"></a>
-<span class="sourceLineNo">769</span>      LOG.error("Coprocessor executorService " + serviceName<a name="line.769"></a>
-<span class="sourceLineNo">770</span>          + " already registered, rejecting request from " + instance);<a name="line.770"></a>
-<span class="sourceLineNo">771</span>      return false;<a name="line.771"></a>
-<span class="sourceLineNo">772</span>    }<a name="line.772"></a>
-<span class="sourceLineNo">773</span><a name="line.773"></a>
-<span class="sourceLineNo">774</span>    coprocessorServiceHandlers.put(serviceName, instance);<a name="line.774"></a>
-<span class="sourceLineNo">775</span>    if (LOG.isDebugEnabled()) {<a name="line.775"></a>
-<span class="sourceLineNo">776</span>      LOG.debug("Registered regionserver coprocessor executorService: executorService=" + serviceName);<a name="line.776"></a>
+<span class="sourceLineNo">756</span>  protected void configureInfoServer() {<a name="line.756"></a>
+<span class="sourceLineNo">757</span>    infoServer.addServlet("rs-status", "/rs-status", RSStatusServlet.class);<a name="line.757"></a>
+<span class="sourceLineNo">758</span>    infoServer.setAttribute(REGIONSERVER, this);<a name="line.758"></a>
+<span class="sourceLineNo">759</span>  }<a name="line.759"></a>
+<span class="sourceLineNo">760</span><a name="line.760"></a>
+<span class="sourceLineNo">761</span>  protected Class&lt;? extends HttpServlet&gt; getDumpServlet() {<a name="line.761"></a>
+<span class="sourceLineNo">762</span>    return RSDumpServlet.class;<a name="line.762"></a>
+<span class="sourceLineNo">763</span>  }<a name="line.763"></a>
+<span class="sourceLineNo">764</span><a name="line.764"></a>
+<span class="sourceLineNo">765</span>  @Override<a name="line.765"></a>
+<span class="sourceLineNo">766</span>  public boolean registerService(com.google.protobuf.Service instance) {<a name="line.766"></a>
+<span class="sourceLineNo">767</span>    /*<a name="line.767"></a>
+<span class="sourceLineNo">768</span>     * No stacking of instances is allowed for a single executorService name<a name="line.768"></a>
+<span class="sourceLineNo">769</span>     */<a name="line.769"></a>
+<span class="sourceLineNo">770</span>    com.google.protobuf.Descriptors.ServiceDescriptor serviceDesc =<a name="line.770"></a>
+<span class="sourceLineNo">771</span>        instance.getDescriptorForType();<a name="line.771"></a>
+<span class="sourceLineNo">772</span>    String serviceName = CoprocessorRpcUtils.getServiceName(serviceDesc);<a name="line.772"></a>
+<span class="sourceLineNo">773</span>    if (coprocessorServiceHandlers.containsKey(serviceName)) {<a name="line.773"></a>
+<span class="sourceLineNo">774</span>      LOG.error("Coprocessor executorService " + serviceName<a name="line.774"></a>
+<span class="sourceLineNo">775</span>          + " already registered, rejecting request from " + instance);<a name="line.775"></a>
+<span class="sourceLineNo">776</span>      return false;<a name="line.776"></a>
 <span class="sourceLineNo">777</span>    }<a name="line.777"></a>
-<span class="sourceLineNo">778</span>    return true;<a name="line.778"></a>
-<span class="sourceLineNo">779</span>  }<a name="line.779"></a>
-<span class="sourceLineNo">780</span><a name="line.780"></a>
-<span class="sourceLineNo">781</span>  /**<a name="line.781"></a>
-<span class="sourceLineNo">782</span>   * Create a 'smarter' Connection, one that is capable of by-passing RPC if the request is to the<a name="line.782"></a>
-<span class="sourceLineNo">783</span>   * local server; i.e. a short-circuit Connection. Safe to use going to local or remote server.<a name="line.783"></a>
-<span class="sourceLineNo">784</span>   */<a name="line.784"></a>
-<span class="sourceLineNo">785</span>  private ClusterConnection createClusterConnection() throws IOException {<a name="line.785"></a>
-<span class="sourceLineNo">786</span>    Configuration conf = this.conf;<a name="line.786"></a>
-<span class="sourceLineNo">787</span>    if (conf.get(HConstants.CLIENT_ZOOKEEPER_QUORUM) != null) {<a name="line.787"></a>
-<span class="sourceLineNo">788</span>      // Use server ZK cluster for server-issued connections, so we clone<a name="line.788"></a>
-<span class="sourceLineNo">789</span>      // the conf and unset the client ZK related properties<a name="line.789"></a>
-<span class="sourceLineNo">790</span>      conf = new Configuration(this.conf);<a name="line.790"></a>
-<span class="sourceLineNo">791</span>      conf.unset(HConstants.CLIENT_ZOOKEEPER_QUORUM);<a name="line.791"></a>
-<span class="sourceLineNo">792</span>    }<a name="line.792"></a>
-<span class="sourceLineNo">793</span>    // Create a cluster connection that when appropriate, can short-circuit and go directly to the<a name="line.793"></a>
-<span class="sourceLineNo">794</span>    // local server if the request is to the local server bypassing RPC. Can be used for both local<a name="line.794"></a>
-<span class="sourceLineNo">795</span>    // and remote invocations.<a name="line.795"></a>
-<span class="sourceLineNo">796</span>    ClusterConnection conn = ConnectionUtils.createShortCircuitConnection(conf, null,<a name="line.796"></a>
-<span class="sourceLineNo">797</span>      userProvider.getCurrent(), serverName, rpcServices, rpcServices);<a name="line.797"></a>
-<span class="sourceLineNo">798</span>    // This is used to initialize the batch thread pool inside the connection implementation.<a name="line.798"></a>
-<span class="sourceLineNo">799</span>    // When deploy a fresh cluster, we may first use the cluster connection in InitMetaProcedure,<a name="line.799"></a>
-<span class="sourceLineNo">800</span>    // which will be executed inside the PEWorker, and then the batch thread pool will inherit the<a name="line.800"></a>
-<span class="sourceLineNo">801</span>    // thread group of PEWorker, which will be destroy when shutting down the ProcedureExecutor. It<a name="line.801"></a>
-<span class="sourceLineNo">802</span>    // will cause lots of procedure related UTs to fail, so here let's initialize it first, no harm.<a name="line.802"></a>
-<span class="sourceLineNo">803</span>    conn.getTable(TableName.META_TABLE_NAME).close();<a name="line.803"></a>
-<span class="sourceLineNo">804</span>    return conn;<a name="line.804"></a>
-<span class="sourceLineNo">805</span>  }<a name="line.805"></a>
-<span class="sourceLineNo">806</span><a name="line.806"></a>
-<span class="sourceLineNo">807</span>  /**<a name="line.807"></a>
-<span class="sourceLineNo">808</span>   * Run test on configured codecs to make sure supporting libs are in place.<a name="line.808"></a>
-<span class="sourceLineNo">809</span>   * @param c<a name="line.809"></a>
-<span class="sourceLineNo">810</span>   * @throws IOException<a name="line.810"></a>
-<span class="sourceLineNo">811</span>   */<a name="line.811"></a>
-<span class="sourceLineNo">812</span>  private static void checkCodecs(final Configuration c) throws IOException {<a name="line.812"></a>
-<span class="sourceLineNo">813</span>    // check to see if the codec list is available:<a name="line.813"></a>
-<span class="sourceLineNo">814</span>    String [] codecs = c.getStrings("hbase.regionserver.codecs", (String[])null);<a name="line.814"></a>
-<span class="sourceLineNo">815</span>    if (codecs == null) return;<a name="line.815"></a>
-<span class="sourceLineNo">816</span>    for (String codec : codecs) {<a name="line.816"></a>
-<span class="sourceLineNo">817</span>      if (!CompressionTest.testCompression(codec)) {<a name="line.817"></a>
-<span class="sourceLineNo">818</span>        throw new IOException("Compression codec " + codec +<a name="line.818"></a>
-<span class="sourceLineNo">819</span>          " not supported, aborting RS construction");<a name="line.819"></a>
-<span class="sourceLineNo">820</span>      }<a name="line.820"></a>
-<span class="sourceLineNo">821</span>    }<a name="line.821"></a>
-<span class="sourceLineNo">822</span>  }<a name="line.822"></a>
-<span class="sourceLineNo">823</span><a name="line.823"></a>
-<span class="sourceLineNo">824</span>  public String getClusterId() {<a name="line.824"></a>
-<span class="sourceLineNo">825</span>    return this.clusterId;<a name="line.825"></a>
-<span class="sourceLineNo">826</span>  }<a name="line.826"></a>
-<span class="sourceLineNo">827</span><a name="line.827"></a>
-<span class="sourceLineNo">828</span>  /**<a name="line.828"></a>
-<span class="sourceLineNo">829</span>   * Setup our cluster connection if not already initialized.<a name="line.829"></a>
-<span class="sourceLineNo">830</span>   */<a name="line.830"></a>
-<span class="sourceLineNo">831</span>  protected synchronized void setupClusterConnection() throws IOException {<a name="line.831"></a>
-<span class="sourceLineNo">832</span>    if (clusterConnection == null) {<a name="line.832"></a>
-<span class="sourceLineNo">833</span>      clusterConnection = createClusterConnection();<a name="line.833"></a>
-<span class="sourceLineNo">834</span>      metaTableLocator = new MetaTableLocator();<a name="line.834"></a>
-<span class="sourceLineNo">835</span>    }<a name="line.835"></a>
-<span class="sourceLineNo">836</span>  }<a name="line.836"></a>
-<span class="sourceLineNo">837</span><a name="line.837"></a>
-<span class="sourceLineNo">838</span>  /**<a name="line.838"></a>
-<span class="sourceLineNo">839</span>   * All initialization needed before we go register with Master.&lt;br&gt;<a name="line.839"></a>
-<span class="sourceLineNo">840</span>   * Do bare minimum. Do bulk of initializations AFTER we've connected to the Master.&lt;br&gt;<a name="line.840"></a>
-<span class="sourceLineNo">841</span>   * In here we just put up the RpcServer, setup Connection, and ZooKeeper.<a name="line.841"></a>
-<span class="sourceLineNo">842</span>   */<a name="line.842"></a>
-<span class="sourceLineNo">843</span>  private void preRegistrationInitialization() {<a name="line.843"></a>
-<span class="sourceLineNo">844</span>    try {<a name="line.844"></a>
-<span class="sourceLineNo">845</span>      initializeZooKeeper();<a name="line.845"></a>
-<span class="sourceLineNo">846</span>      setupClusterConnection();<a name="line.846"></a>
-<span class="sourceLineNo">847</span>      // Setup RPC client for master communication<a name="line.847"></a>
-<span class="sourceLineNo">848</span>      this.rpcClient = RpcClientFactory.createClient(conf, clusterId, new InetSocketAddress(<a name="line.848"></a>
-<span class="sourceLineNo">849</span>          this.rpcServices.isa.getAddress(), 0), clusterConnection.getConnectionMetrics());<a name="line.849"></a>
-<span class="sourceLineNo">850</span>    } catch (Throwable t) {<a name="line.850"></a>
-<span class="sourceLineNo">851</span>      // Call stop if error or process will stick around for ever since server<a name="line.851"></a>
-<span class="sourceLineNo">852</span>      // puts up non-daemon threads.<a name="line.852"></a>
-<span class="sourceLineNo">853</span>      this.rpcServices.stop();<a name="line.853"></a>
-<span class="sourceLineNo">854</span>      abort("Initialization of RS failed.  Hence aborting RS.", t);<a name="line.854"></a>
-<span class="sourceLineNo">855</span>    }<a name="line.855"></a>
-<span class="sourceLineNo">856</span>  }<a name="line.856"></a>
-<span class="sourceLineNo">857</span><a name="line.857"></a>
-<span class="sourceLineNo">858</span>  /**<a name="line.858"></a>
-<span class="sourceLineNo">859</span>   * Bring up connection to zk ensemble and then wait until a master for this cluster and then after<a name="line.859"></a>
-<span class="sourceLineNo">860</span>   * that, wait until cluster 'up' flag has been set. This is the order in which master does things.<a name="line.860"></a>
-<span class="sourceLineNo">861</span>   * &lt;p&gt;<a name="line.861"></a>
-<span class="sourceLineNo">862</span>   * Finally open long-living server short-circuit connection.<a name="line.862"></a>
-<span class="sourceLineNo">863</span>   */<a name="line.863"></a>
-<span class="sourceLineNo">864</span>  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="RV_RETURN_VALUE_IGNORED_BAD_PRACTICE",<a name="line.864"></a>
-<span class="sourceLineNo">865</span>    justification="cluster Id znode read would give us correct response")<a name="line.865"></a>
-<span class="sourceLineNo">866</span>  private void initializeZooKeeper() throws IOException, InterruptedException {<a name="line.866"></a>
-<span class="sourceLineNo">867</span>    // Nothing to do in here if no Master in the mix.<a name="line.867"></a>
-<span class="sourceLineNo">868</span>    if (this.masterless) {<a name="line.868"></a>
-<span class="sourceLineNo">869</span>      return;<a name="line.869"></a>
-<span class="sourceLineNo">870</span>    }<a name="line.870"></a>
-<span class="sourceLineNo">871</span><a name="line.871"></a>
-<span class="sourceLineNo">872</span>    // Create the master address tracker, register with zk, and start it.  Then<a name="line.872"></a>
-<span class="sourceLineNo">873</span>    // block until a master is available.  No point in starting up if no master<a name="line.873"></a>
-<span class="sourceLineNo">874</span>    // running.<a name="line.874"></a>
-<span class="sourceLineNo">875</span>    blockAndCheckIfStopped(this.masterAddressTracker);<a name="line.875"></a>
+<span class="sourceLineNo">778</span><a name="line.778"></a>
+<span class="sourceLineNo">779</span>    coprocessorServiceHandlers.put(serviceName, instance);<a name="line.779"></a>
+<span class="sourceLineNo">780</span>    if (LOG.isDebugEnabled()) {<a name="line.780"></a>
+<span class="sourceLineNo">781</span>      LOG.debug("Registered regionserver coprocessor executorService: executorService=" + serviceName);<a name="line.781"></a>
+<span class="sourceLineNo">782</span>    }<a name="line.782"></a>
+<span class="sourceLineNo">783</span>    return true;<a name="line.783"></a>
+<span class="sourceLineNo">784</span>  }<a name="line.784"></a>
+<span class="sourceLineNo">785</span><a name="line.785"></a>
+<span class="sourceLineNo">786</span>  /**<a name="line.786"></a>
+<span class="sourceLineNo">787</span>   * Create a 'smarter' Connection, one that is capable of by-passing RPC if the request is to the<a name="line.787"></a>
+<span class="sourceLineNo">788</span>   * local server; i.e. a short-circuit Connection. Safe to use going to local or remote server.<a name="line.788"></a>
+<span class="sourceLineNo">789</span>   */<a name="line.789"></a>
+<span class="sourceLineNo">790</span>  private ClusterConnection createClusterConnection() throws IOException {<a name="line.790"></a>
+<span class="sourceLineNo">791</span>    Configuration conf = this.conf;<a name="line.791"></a>
+<span class="sourceLineNo">792</span>    if (conf.get(HConstants.CLIENT_ZOOKEEPER_QUORUM) != null) {<a name="line.792"></a>
+<span class="sourceLineNo">793</span>      // Use server ZK cluster for server-issued connections, so we clone<a name="line.793"></a>
+<span class="sourceLineNo">794</span>      // the conf and unset the client ZK related properties<a name="line.794"></a>
+<span class="sourceLineNo">795</span>      conf = new Configuration(this.conf);<a name="line.795"></a>
+<span class="sourceLineNo">796</span>      conf.unset(HConstants.CLIENT_ZOOKEEPER_QUORUM);<a name="line.796"></a>
+<span class="sourceLineNo">797</span>    }<a name="line.797"></a>
+<span class="sourceLineNo">798</span>    // Create a cluster connection that when appropriate, can short-circuit and go directly to the<a name="line.798"></a>
+<span class="sourceLineNo">799</span>    // local server if the request is to the local server bypassing RPC. Can be used for both local<a name="line.799"></a>
+<span class="sourceLineNo">800</span>    // and remote invocations.<a name="line.800"></a>
+<span class="sourceLineNo">801</span>    ClusterConnection conn = ConnectionUtils.createShortCircuitConnection(conf, null,<a name="line.801"></a>
+<span class="sourceLineNo">802</span>      userProvider.getCurrent(), serverName, rpcServices, rpcServices);<a name="line.802"></a>
+<span class="sourceLineNo">803</span>    // This is used to initialize the batch thread pool inside the connection implementation.<a name="line.803"></a>
+<span class="sourceLineNo">804</span>    // When deploy a fresh cluster, we may first use the cluster connection in InitMetaProcedure,<a name="line.804"></a>
+<span class="sourceLineNo">805</span>    // which will be executed inside the PEWorker, and then the batch thread pool will inherit the<a name="line.805"></a>
+<span class="sourceLineNo">806</span>    // thread group of PEWorker, which will be destroy when shutting down the ProcedureExecutor. It<a name="line.806"></a>
+<span class="sourceLineNo">807</span>    // will cause lots of procedure related UTs to fail, so here let's initialize it first, no harm.<a name="line.807"></a>
+<span class="sourceLineNo">808</span>    conn.getTable(TableName.META_TABLE_NAME).close();<a name="line.808"></a>
+<span class="sourceLineNo">809</span>    return conn;<a name="line.809"></a>
+<span class="sourceLineNo">810</span>  }<a name="line.810"></a>
+<span class="sourceLineNo">811</span><a name="line.811"></a>
+<span class="sourceLineNo">812</span>  /**<a name="line.812"></a>
+<span class="sourceLineNo">813</span>   * Run test on configured codecs to make sure supporting libs are in place.<a name="line.813"></a>
+<span class="sourceLineNo">814</span>   * @param c<a name="line.814"></a>
+<span class="sourceLineNo">815</span>   * @throws IOException<a name="line.815"></a>
+<span class="sourceLineNo">816</span>   */<a name="line.816"></a>
+<span class="sourceLineNo">817</span>  private static void checkCodecs(final Configuration c) throws IOException {<a name="line.817"></a>
+<span class="sourceLineNo">818</span>    // check to see if the codec list is available:<a name="line.818"></a>
+<span class="sourceLineNo">819</span>    String [] codecs = c.getStrings("hbase.regionserver.codecs", (String[])null);<a name="line.819"></a>
+<span class="sourceLineNo">820</span>    if (codecs == null) return;<a name="line.820"></a>
+<span class="sourceLineNo">821</span>    for (String codec : codecs) {<a name="line.821"></a>
+<span class="sourceLineNo">822</span>      if (!CompressionTest.testCompression(codec)) {<a name="line.822"></a>
+<span class="sourceLineNo">823</span>        throw new IOException("Compression codec " + codec +<a name="line.823"></a>
+<span class="sourceLineNo">824</span>          " not supported, aborting RS construction");<a name="line.824"></a>
+<span class="sourceLineNo">825</span>      }<a name="line.825"></a>
+<span class="sourceLineNo">826</span>    }<a name="line.826"></a>
+<span class="sourceLineNo">827</span>  }<a name="line.827"></a>
+<span class="sourceLineNo">828</span><a name="line.828"></a>
+<span class="sourceLineNo">829</span>  public String getClusterId() {<a name="line.829"></a>
+<span class="sourceLineNo">830</span>    return this.clusterId;<a name="line.830"></a>
+<span class="sourceLineNo">831</span>  }<a name="line.831"></a>
+<span class="sourceLineNo">832</span><a name="line.832"></a>
+<span class="sourceLineNo">833</span>  /**<a name="line.833"></a>
+<span class="sourceLineNo">834</span>   * Setup our cluster connection if not already initialized.<a name="line.834"></a>
+<span class="sourceLineNo">835</span>   */<a name="line.835"></a>
+<span class="sourceLineNo">836</span>  protected synchronized void setupClusterConnection() throws IOException {<a name="line.836"></a>
+<span class="sourceLineNo">837</span>    if (clusterConnection == null) {<a name="line.837"></a>
+<span class="sourceLineNo">838</span>      clusterConnection = createClusterConnection();<a name="line.838"></a>
+<span class="sourceLineNo">839</span>      metaTableLocator = new MetaTableLocator();<a name="line.839"></a>
+<span class="sourceLineNo">840</span>    }<a name="line.840"></a>
+<span class="sourceLineNo">841</span>  }<a name="line.841"></a>
+<span class="sourceLineNo">842</span><a name="line.842"></a>
+<span class="sourceLineNo">843</span>  /**<a name="line.843"></a>
+<span class="sourceLineNo">844</span>   * All initialization needed before we go register with Master.&lt;br&gt;<a name="line.844"></a>
+<span class="sourceLineNo">845</span>   * Do bare minimum. Do bulk of initializations AFTER we've connected to the Master.&lt;br&gt;<a name="line.845"></a>
+<span class="sourceLineNo">846</span>   * In here we just put up the RpcServer, setup Connection, and ZooKeeper.<a name="line.846"></a>
+<span class="sourceLineNo">847</span>   */<a name="line.847"></a>
+<span class="sourceLineNo">848</span>  private void preRegistrationInitialization() {<a name="line.848"></a>
+<span class="sourceLineNo">849</span>    try {<a name="line.849"></a>
+<span class="sourceLineNo">850</span>      initializeZooKeeper();<a name="line.850"></a>
+<span class="sourceLineNo">851</span>      setupClusterConnection();<a name="line.851"></a>
+<span class="sourceLineNo">852</span>      // Setup RPC client for master communication<a name="line.852"></a>
+<span class="sourceLineNo">853</span>      this.rpcClient = RpcClientFactory.createClient(conf, clusterId, new InetSocketAddress(<a name="line.853"></a>
+<span class="sourceLineNo">854</span>          this.rpcServices.isa.getAddress(), 0), clusterConnection.getConnectionMetrics());<a name="line.854"></a>
+<span class="sourceLineNo">855</span>    } catch (Throwable t) {<a name="line.855"></a>
+<span class="sourceLineNo">856</span>      // Call stop if error or process will stick around for ever since server<a name="line.856"></a>
+<span class="sourceLineNo">857</span>      // puts up non-daemon threads.<a name="line.857"></a>
+<span class="sourceLineNo">858</span>      this.rpcServices.stop();<a name="line.858"></a>
+<span class="sourceLineNo">859</span>      abort("Initialization of RS failed.  Hence aborting RS.", t);<a name="line.859"></a>
+<span class="sourceLineNo">860</span>    }<a name="line.860"></a>
+<span class="sourceLineNo">861</span>  }<a name="line.861"></a>
+<span class="sourceLineNo">862</span><a name="line.862"></a>
+<span class="sourceLineNo">863</span>  /**<a name="line.863"></a>
+<span class="sourceLineNo">864</span>   * Bring up connection to zk ensemble and then wait until a master for this cluster and then after<a name="line.864"></a>
+<span class="sourceLineNo">865</span>   * that, wait until cluster 'up' flag has been set. This is the order in which master does things.<a name="line.865"></a>
+<span class="sourceLineNo">866</span>   * &lt;p&gt;<a name="line.866"></a>
+<span class="sourceLineNo">867</span>   * Finally open long-living server short-circuit connection.<a name="line.867"></a>
+<span class="sourceLineNo">868</span>   */<a name="line.868"></a>
+<span class="sourceLineNo">869</span>  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="RV_RETURN_VALUE_IGNORED_BAD_PRACTICE",<a name="line.869"></a>
+<span class="sourceLineNo">870</span>    justification="cluster Id znode read would give us correct response")<a name="line.870"></a>
+<span class="sourceLineNo">871</span>  private void initializeZooKeeper() throws IOException, InterruptedException {<a name="line.871"></a>
+<span class="sourceLineNo">872</span>    // Nothing to do in here if no Master in the mix.<a name="line.872"></a>
+<span class="sourceLineNo">873</span>    if (this.masterless) {<a name="line.873"></a>
+<span class="sourceLineNo">874</span>      return;<a name="line.874"></a>
+<span class="sourceLineNo">875</span>    }<a name="line.875"></a>
 <span class="sourceLineNo">876</span><a name="line.876"></a>
-<span class="sourceLineNo">877</span>    // Wait on cluster being up.  Master will set this flag up in zookeeper<a name="line.877"></a>
-<span class="sourceLineNo">878</span>    // when ready.<a name="line.878"></a>
-<span class="sourceLineNo">879</span>    blockAndCheckIfStopped(this.clusterStatusTracker);<a name="line.879"></a>
-<span class="sourceLineNo">880</span><a name="line.880"></a>
-<span class="sourceLineNo">881</span>    // If we are HMaster then the cluster id should have already been set.<a name="line.881"></a>
-<span class="sourceLineNo">882</span>    if (clusterId == null) {<a name="line.882"></a>
-<span class="sourceLineNo">883</span>      // Retrieve clusterId<a name="line.883"></a>
-<span class="sourceLineNo">884</span>      // Since cluster status is now up<a name="line.884"></a>
-<span class="sourceLineNo">885</span>      // ID should have already been set by HMaster<a name="line.885"></a>
-<span class="sourceLineNo">886</span>      try {<a name="line.886"></a>
-<span class="sourceLineNo">887</span>        clusterId = ZKClusterId.readClusterIdZNode(this.zooKeeper);<a name="line.887"></a>
-<span class="sourceLineNo">888</span>        if (clusterId == null) {<a name="line.888"></a>
-<span class="sourceLineNo">889</span>          this.abort("Cluster ID has not been set");<a name="line.889"></a>
-<span class="sourceLineNo">890</span>        }<a name="line.890"></a>
-<span class="sourceLineNo">891</span>        LOG.info("ClusterId : " + clusterId);<a name="line.891"></a>
-<span class="sourceLineNo">892</span>      } catch (KeeperException e) {<a name="line.892"></a>
-<span class="sourceLineNo">893</span>        this.abort("Failed to retrieve Cluster ID", e);<a name="line.893"></a>
-<span class="sourceLineNo">894</span>      }<a name="line.894"></a>
-<span class="sourceLineNo">895</span>    }<a name="line.895"></a>
-<span class="sourceLineNo">896</span><a name="line.896"></a>
-<span class="sourceLineNo">897</span>    waitForMasterActive();<a name="line.897"></a>
-<span class="sourceLineNo">898</span>    if (isStopped() || isAborted()) {<a name="line.898"></a>
-<span class="sourceLineNo">899</span>      return; // No need for further initialization<a name="line.899"></a>
+<span class="sourceLineNo">877</span>    // Create the master address tracker, register with zk, and start it.  Then<a name="line.877"></a>
+<span class="sourceLineNo">878</span>    // block until a master is available.  No point in starting up if no master<a name="line.878"></a>
+<span class="sourceLineNo">879</span>    // running.<a name="line.879"></a>
+<span class="sourceLineNo">880</span>    blockAndCheckIfStopped(this.masterAddressTracker);<a name="line.880"></a>
+<span class="sourceLineNo">881</span><a name="line.881"></a>
+<span class="sourceLineNo">882</span>    // Wait on cluster being up.  Master will set this flag up in zookeeper<a name="line.882"></a>
+<span class="sourceLineNo">883</span>    // when ready.<a name="line.883"></a>
+<span class="sourceLineNo">884</span>    blockAndCheckIfStopped(this.clusterStatusTracker);<a name="line.884"></a>
+<span class="sourceLineNo">885</span><a name="line.885"></a>
+<span class="sourceLineNo">886</span>    // If we are HMaster then the cluster id should have already been set.<a name="line.886"></a>
+<span class="sourceLineNo">887</span>    if (clusterId == null) {<a name="line.887"></a>
+<span class="sourceLineNo">888</span>      // Retrieve clusterId<a name="line.888"></a>
+<span class="sourceLineNo">889</span>      // Since cluster status is now up<a name="line.889"></a>
+<span class="sourceLineNo">890</span>      // ID should have already been set by HMaster<a name="line.890"></a>
+<span class="sourceLineNo">891</span>      try {<a name="line.891"></a>
+<span class="sourceLineNo">892</span>        clusterId = ZKClusterId.readClusterIdZNode(this.zooKeeper);<a name="line.892"></a>
+<span class="sourceLineNo">893</span>        if (clusterId == null) {<a name="line.893"></a>
+<span class="sourceLineNo">894</span>          this.abort("Cluster ID has not been set");<a name="line.894"></a>
+<span class="sourceLineNo">895</span>        }<a name="line.895"></a>
+<span class="sourceLineNo">896</span>        LOG.info("ClusterId : " + clusterId);<a name="line.896"></a>
+<span class="sourceLineNo">897</span>      } catch (KeeperException e) {<a name="line.897"></a>
+<span class="sourceLineNo">898</span>        this.abort("Failed to retrieve Cluster ID", e);<a name="line.898"></a>
+<span class="sourceLineNo">899</span>      }<a name="line.899"></a>
 <span class="sourceLineNo">900</span>    }<a name="line.900"></a>
 <span class="sourceLineNo">901</span><a name="line.901"></a>
-<span class="sourceLineNo">902</span>    // watch for snapshots and other procedures<a name="line.902"></a>
-<span class="sourceLineNo">903</span>    try {<a name="line.903"></a>
-<span class="sourceLineNo">904</span>      rspmHost = new RegionServerProcedureManagerHost();<a name="line.904"></a>
-<span class="sourceLineNo">905</span>      rspmHost.loadProcedures(conf);<a name="line.905"></a>
-<span class="sourceLineNo">906</span>      rspmHost.initialize(this);<a name="line.906"></a>
-<span class="sourceLineNo">907</span>    } catch (KeeperException e) {<a name="line.907"></a>
-<span class="sourceLineNo">908</span>      this.abort("Failed to reach coordination cluster when creating procedure handler.", e);<a name="line.908"></a>
-<span class="sourceLineNo">909</span>    }<a name="line.909"></a>
-<span class="sourceLineNo">910</span>  }<a name="line.910"></a>
-<span class="sourceLineNo">911</span><a name="line.911"></a>
-<span class="sourceLineNo">912</span>  /**<a name="line.912"></a>
-<span class="sourceLineNo">913</span>   * Utilty method to wait indefinitely on a znode availability while checking<a name="line.913"></a>
-<span class="sourceLineNo">914</span>   * if the region server is shut down<a name="line.914"></a>
-<span class="sourceLineNo">915</span>   * @param tracker znode tracker to use<a name="line.915"></a>
-<span class="sourceLineNo">916</span>   * @throws IOException any IO exception, plus if the RS is stopped<a name="line.916"></a>
-<span class="sourceLineNo">917</span>   * @throws InterruptedException<a name="line.917"></a>
-<span class="sourceLineNo">918</span>   */<a name="line.918"></a>
-<span class="sourceLineNo">919</span>  private void blockAndCheckIfStopped(ZKNodeTracker tracker)<a name="line.919"></a>
-<span class="sourceLineNo">920</span>      throws IOException, InterruptedException {<a name="line.920"></a>
-<span class="sourceLineNo">921</span>    while (tracker.blockUntilAvailable(this.msgInterval, false) == null) {<a name="line.921"></a>
-<span class="sourceLineNo">922</span>      if (this.stopped) {<a name="line.922"></a>
-<span class="sourceLineNo">923</span>        throw new IOException("Received the shutdown message while waiting.");<a name="line.923"></a>
-<span class="sourceLineNo">924</span>      }<a name="line.924"></a>
-<span class="sourceLineNo">925</span>    }<a name="line.925"></a>
-<span class="sourceLineNo">926</span>  }<a name="line.926"></a>
-<span class="sourceLineNo">927</span><a name="line.927"></a>
-<span class="sourceLineNo">928</span>  /**<a name="line.928"></a>
-<span class="sourceLineNo">929</span>   * @return True if the cluster is up.<a name="line.929"></a>
-<span class="sourceLineNo">930</span>   */<a name="line.930"></a>
-<span class="sourceLineNo">931</span>  @Override<a name="line.931"></a>
-<span class="sourceLineNo">932</span>  public boolean isClusterUp() {<a name="line.932"></a>
-<span class="sourceLineNo">933</span>    return this.masterless ||<a name="line.933"></a>
-<span class="sourceLineNo">934</span>        (this.clusterStatusTracker != null &amp;&amp; this.clusterStatusTracker.isClusterUp());<a name="line.934"></a>
-<span class="sourceLineNo">935</span>  }<a name="line.935"></a>
-<span class="sourceLineNo">936</span><a name="line.936"></a>
-<span class="sourceLineNo">937</span>  /**<a name="line.937"></a>
-<span class="sourceLineNo">938</span>   * The HRegionServer sticks in this loop until closed.<a name="line.938"></a>
-<span class="sourceLineNo">939</span>   */<a name="line.939"></a>
-<span class="sourceLineNo">940</span>  @Override<a name="line.940"></a>
-<span class="sourceLineNo">941</span>  public void run() {<a name="line.941"></a>
-<span class="sourceLineNo">942</span>    try {<a name="line.942"></a>
-<span class="sourceLineNo">943</span>      // Do pre-registration initializations; zookeeper, lease threads, etc.<a name="line.943"></a>
-<span class="sourceLineNo">944</span>      preRegistrationInitialization();<a name="line.944"></a>
-<span class="sourceLineNo">945</span>    } catch (Throwable e) {<a name="line.945"></a>
-<span class="sourceLineNo">946</span>      abort("Fatal exception during initialization", e);<a name="line.946"></a>
-<span class="sourceLineNo">947</span>    }<a name="line.947"></a>
-<span class="sourceLineNo">948</span><a name="line.948"></a>
-<span class="sourceLineNo">949</span>    try {<a name="line.949"></a>
-<span class="sourceLineNo">950</span>      if (!isStopped() &amp;&amp; !isAborted()) {<a name="line.950"></a>
-<span class="sourceLineNo">951</span>        ShutdownHook.install(conf, fs, this, Thread.currentThread());<a name="line.951"></a>
-<span class="sourceLineNo">952</span>        // Initialize the RegionServerCoprocessorHost now that our ephemeral<a name="line.952"></a>
-<span class="sourceLineNo">953</span>        // node was created, in case any coprocessors want to use ZooKeeper<a name="line.953"></a>
-<span class="sourceLineNo">954</span>        this.rsHost = new RegionServerCoprocessorHost(this, this.conf);<a name="line.954"></a>
-<span class="sourceLineNo">955</span>      }<a name="line.955"></a>
-<span class="sourceLineNo">956</span><a name="line.956"></a>
-<span class="sourceLineNo">957</span>      // Try and register with the Master; tell it we are here.  Break if server is stopped or the<a name="line.957"></a>
-<span class="sourceLineNo">958</span>      // clusterup flag is down or hdfs went wacky. Once registered successfully, go ahead and start<a name="line.958"></a>
-<span class="sourceLineNo">959</span>      // up all Services. Use RetryCounter to get backoff in case Master is struggling to come up.<a name="line.959"></a>
-<span class="sourceLineNo">960</span>      LOG.debug("About to register with Master.");<a name="line.960"></a>
-<span class="sourceLineNo">961</span>      RetryCounterFactory rcf = new RetryCounterFactory(Integer.MAX_VALUE,<a name="line.961"></a>
-<span class="sourceLineNo">962</span>          this.sleeper.getPeriod(), 1000 * 60 * 5);<a name="line.962"></a>
-<span class="sourceLineNo">963</span>      RetryCounter rc = rcf.create();<a name="line.963"></a>
-<span class="sourceLineNo">964</span>      while (keepLooping()) {<a name="line.964"></a>
-<span class="sourceLineNo">965</span>        RegionServerStartupResponse w = reportForDuty();<a name="line.965"></a>
-<span class="sourceLineNo">966</span>        if (w == null) {<a name="line.966"></a>
-<span class="sourceLineNo">967</span>          long sleepTime = rc.getBackoffTimeAndIncrementAttempts();<a name="line.967"></a>
-<span class="sourceLineNo">968</span>          LOG.warn("reportForDuty failed; sleeping {} ms and then retrying.", sleepTime);<a name="line.968"></a>
-<span class="sourceLineNo">969</span>          this.sleeper.sleep(sleepTime);<a name="line.969"></a>
-<span class="sourceLineNo">970</span>        } else {<a name="line.970"></a>
-<span class="sourceLineNo">971</span>          handleReportForDutyResponse(w);<a name="line.971"></a>
-<span class="sourceLineNo">972</span>          break;<a name="line.972"></a>
-<span class="sourceLineNo">973</span>        }<a name="line.973"></a>
-<span class="sourceLineNo">974</span>      }<a name="line.974"></a>
-<span class="sourceLineNo">975</span><a name="line.975"></a>
-<span class="sourceLineNo">976</span>      if (!isStopped() &amp;&amp; isHealthy()) {<a name="line.976"></a>
-<span class="sourceLineNo">977</span>        // start the snapshot handler and other procedure handlers,<a name="line.977"></a>
-<span class="sourceLineNo">978</span>        // since the server is ready to run<a name="line.978"></a>
-<span class="sourceLineNo">979</span>        if (this.rspmHost != null) {<a name="line.979"></a>
-<span class="sourceLineNo">980</span>          this.rspmHost.start();<a name="line.980"></a>
-<span class="sourceLineNo">981</span>        }<a name="line.981"></a>
-<span class="sourceLineNo">982</span>        // Start the Quota Manager<a name="line.982"></a>
-<span class="sourceLineNo">983</span>        if (this.rsQuotaManager != null) {<a name="line.983"></a>
-<span class="sourceLineNo">984</span>          rsQuotaManager.start(getRpcServer().getScheduler());<a name="line.984"></a>
-<span class="sourceLineNo">985</span>        }<a name="line.985"></a>
-<span class="sourceLineNo">986</span>        if (this.rsSpaceQuotaManager != null) {<a name="line.986"></a>
-<span class="sourceLineNo">987</span>          this.rsSpaceQuotaManager.start();<a name="line.987"></a>
-<span class="sourceLineNo">988</span>        }<a name="line.988"></a>
-<span class="sourceLineNo">989</span>      }<a name="line.989"></a>
-<span class="sourceLineNo">990</span><a name="line.990"></a>
-<span class="sourceLineNo">991</span>      // We registered with the Master.  Go into run mode.<a name="line.991"></a>
-<span class="sourceLineNo">992</span>      long lastMsg = System.currentTimeMillis();<a name="line.992"></a>
-<span class="sourceLineNo">993</span>      long oldRequestCount = -1;<a name="line.993"></a>
-<span class="sourceLineNo">994</span>      // The main run loop.<a name="line.994"></a>
-<span class="sourceLineNo">995</span>      while (!isStopped() &amp;&amp; isHealthy()) {<a name="line.995"></a>
-<span class="sourceLineNo">996</span>        if (!isClusterUp()) {<a name="line.996"></a>
-<span class="sourceLineNo">997</span>          if (isOnlineRegionsEmpty()) {<a name="line.997"></a>
-<span class="sourceLineNo">998</span>            stop("Exiting; cluster shutdown set and not carrying any regions");<a name="line.998"></a>
-<span class="sourceLineNo">999</span>          } else if (!this.stopping) {<a name="line.999"></a>
-<span class="sourceLineNo">1000</span>            this.stopping = true;<a name="line.1000"></a>
-<span class="sourceLineNo">1001</span>            LOG.info("Closing user regions");<a name="line.1001"></a>
-<span class="sourceLineNo">1002</span>            closeUserRegions(this.abortRequested);<a name="line.1002"></a>
-<span class="sourceLineNo">1003</span>          } else if (this.stopping) {<a name="line.1003"></a>
-<span class="sourceLineNo">1004</span>            boolean allUserRegionsOffline = areAllUserRegionsOffline();<a name="line.1004"></a>
-<span class="sourceLineNo">1005</span>            if (allUserRegionsOffline) {<a name="line.1005"></a>
-<span class="sourceLineNo">1006</span>              // Set stopped if no more write requests tp meta tables<a name="line.1006"></a>
-<span class="sourceLineNo">1007</span>              // since last time we went around the loop.  Any open<a name="line.1007"></a>
-<span class="sourceLineNo">1008</span>              // meta regions will be closed on our way out.<a name="line.1008"></a>
-<span class="sourceLineNo">1009</span>              if (oldRequestCount == getWriteRequestCount()) {<a name="line.1009"></a>
-<span class="sourceLineNo">1010</span>                stop("Stopped; only catalog regions remaining online");<a name="line.1010"></a>
-<span class="sourceLineNo">1011</span>                break;<a name="line.1011"></a>
-<span class="sourceLineNo">1012</span>              }<a name="line.1012"></a>
-<span class="sourceLineNo">1013</span>              oldRequestCount = getWriteRequestCount();<a name="line.1013"></a>
-<span class="sourceLineNo">1014</span>            } else {<a name="line.1014"></a>
-<span class="sourceLineNo">1015</span>              // Make sure all regions have been closed -- some regions may<a name="line.1015"></a>
-<span class="sourceLineNo">1016</span>              // have not got it because we were splitting at the time of<a name="line.1016"></a>
-<span class="sourceLineNo">1017</span>              // the call to closeUserRegions.<a name="line.1017"></a>
-<span class="sourceLineNo">1018</span>              closeUserRegions(this.abortRequested);<a name="line.1018"></a>
-<span class="sourceLineNo">1019</span>            }<a name="line.1019"></a>
-<span class="sourceLineNo">1020</span>            LOG.debug("Waiting on " + getOnlineRegionsAsPrintableString());<a name="line.1020"></a>
-<span class="sourceLineNo">1021</span>          }<a name="line.1021"></a>
-<span class="sourceLineNo">1022</span>        }<a name="line.1022"></a>
-<span class="sourceLineNo">1023</span>        long now = System.currentTimeMillis();<a name="line.1023"></a>
-<span class="sourceLineNo">1024</span>        if ((now - lastMsg) &gt;= msgInterval) {<a name="line.1024"></a>
-<span class="sourceLineNo">1025</span>          tryRegionServerReport(lastMsg, now);<a name="line.1025"></a>
-<span class="sourceLineNo">1026</span>          lastMsg = System.currentTimeMillis();<a name="line.1026"></a>
+<span class="sourceLineNo">902</span>    waitForMasterActive();<a name="line.902"></a>
+<span class="sourceLineNo">903</span>    if (isStopped() || isAborted()) {<a name="line.903"></a>
+<span class="sourceLineNo">904</span>      return; // No need for further initialization<a name="line.904"></a>
+<span class="sourceLineNo">905</span>    }<a name="line.905"></a>
+<span class="sourceLineNo">906</span><a name="line.906"></a>
+<span class="sourceLineNo">907</span>    // watch for snapshots and other procedures<a name="line.907"></a>
+<span class="sourceLineNo">908</span>    try {<a name="line.908"></a>
+<span class="sourceLineNo">909</span>      rspmHost = new RegionServerProcedureManagerHost();<a name="line.909"></a>
+<span class="sourceLineNo">910</span>      rspmHost.loadProcedures(conf);<a name="line.910"></a>
+<span class="sourceLineNo">911</span>      rspmHost.initialize(this);<a name="line.911"></a>
+<span class="sourceLineNo">912</span>    } catch (KeeperException e) {<a name="line.912"></a>
+<span class="sourceLineNo">913</span>      this.abort("Failed to reach coordination cluster when creating procedure handler.", e);<a name="line.913"></a>
+<span class="sourceLineNo">914</span>    }<a name="line.914"></a>
+<span class="sourceLineNo">915</span>  }<a name="line.915"></a>
+<span class="sourceLineNo">916</span><a name="line.916"></a>
+<span class="sourceLineNo">917</span>  /**<a name="line.917"></a>
+<span class="sourceLineNo">918</span>   * Utilty method to wait indefinitely on a znode availability while checking<a name="line.918"></a>
+<span class="sourceLineNo">919</span>   * if the region server is shut down<a name="line.919"></a>
+<span class="sourceLineNo">920</span>   * @param tracker znode tracker to use<a name="line.920"></a>
+<span class="sourceLineNo">921</span>   * @throws IOException any IO exception, plus if the RS is stopped<a name="line.921"></a>
+<span class="sourceLineNo">922</span>   * @throws InterruptedException<a name="line.922"></a>
+<span class="sourceLineNo">923</span>   */<a name="line.923"></a>
+<span class="sourceLineNo">924</span>  private void blockAndCheckIfStopped(ZKNodeTracker tracker)<a name="line.924"></a>
+<span class="sourceLineNo">925</span>      throws IOException, InterruptedException {<a name="line.925"></a>
+<span class="sourceLineNo">926</span>    while (tracker.blockUntilAvailable(this.msgInterval, false) == null) {<a name="line.926"></a>
+<span class="sourceLineNo">927</span>      if (this.stopped) {<a name="line.927"></a>
+<span class="sourceLineNo">928</span>        throw new IOException("Received the shutdown message while waiting.");<a name="line.928"></a>
+<span class="sourceLineNo">929</span>      }<a name="line.929"></a>
+<span class="sourceLineNo">930</span>    }<a name="line.930"></a>
+<span class="sourceLineNo">931</span>  }<a name="line.931"></a>
+<span class="sourceLineNo">932</span><a name="line.932"></a>
+<span class="sourceLineNo">933</span>  /**<a name="line.933"></a>
+<span class="sourceLineNo">934</span>   * @return True if the cluster is up.<a name="line.934"></a>
+<span class="sourceLineNo">935</span>   */<a name="line.935"></a>
+<span class="sourceLineNo">936</span>  @Override<a name="line.936"></a>
+<span class="sourceLineNo">937</span>  public boolean isClusterUp() {<a name="line.937"></a>
+<span class="sourceLineNo">938</span>    return this.masterless ||<a name="line.938"></a>
+<span class="sourceLineNo">939</span>        (this.clusterStatusTracker != null &amp;&amp; this.clusterStatusTracker.isClusterUp());<a name="line.939"></a>
+<span class="sourceLineNo">940</span>  }<a name="line.940"></a>
+<span class="sourceLineNo">941</span><a name="line.941"></a>
+<span class="sourceLineNo">942</span>  /**<a name="line.942"></a>
+<span class="sourceLineNo">943</span>   * The HRegionServer sticks in this loop until closed.<a name="line.943"></a>
+<span class="sourceLineNo">944</span>   */<a name="line.944"></a>
+<span class="sourceLineNo">945</span>  @Override<a name="line.945"></a>
+<span class="sourceLineNo">946</span>  public void run() {<a name="line.946"></a>
+<span class="sourceLineNo">947</span>    try {<a name="line.947"></a>
+<span class="sourceLineNo">948</span>      // Do pre-registration initializations; zookeeper, lease threads, etc.<a name="line.948"></a>
+<span class="sourceLineNo">949</span>      preRegistrationInitialization();<a name="line.949"></a>
+<span class="sourceLineNo">950</span>    } catch (Throwable e) {<a name="line.950"></a>
+<span class="sourceLineNo">951</span>      abort("Fatal exception during initialization", e);<a name="line.951"></a>
+<span class="sourceLineNo">952</span>    }<a name="line.952"></a>
+<span class="sourceLineNo">953</span><a name="line.953"></a>
+<span class="sourceLineNo">954</span>    try {<a name="line.954"></a>
+<span class="sourceLineNo">955</span>      if (!isStopped() &amp;&amp; !isAborted()) {<a name="line.955"></a>
+<span class="sourceLineNo">956</span>        ShutdownHook.install(conf, fs, this, Thread.currentThread());<a name="line.956"></a>
+<span class="sourceLineNo">957</span>        // Initialize the RegionServerCoprocessorHost now that our ephemeral<a name="line.957"></a>
+<span class="sourceLineNo">958</span>        // node was created, in case any coprocessors want to use ZooKeeper<a name="line.958"></a>
+<span class="sourceLineNo">959</span>        this.rsHost = new RegionServerCoprocessorHost(this, this.conf);<a name="line.959"></a>
+<span class="sourceLineNo">960</span>      }<a name="line.960"></a>
+<span class="sourceLineNo">961</span><a name="line.961"></a>
+<span class="sourceLineNo">962</span>      // Try and register with the Master; tell it we are here.  Break if server is stopped or the<a name="line.962"></a>
+<span class="sourceLineNo">963</span>      // clusterup flag is down or hdfs went wacky. Once registered successfully, go ahead and start<a name="line.963"></a>
+<span class="sourceLineNo">964</span>      // up all Services. Use RetryCounter to get backoff in case Master is struggling to come up.<a name="line.964"></a>
+<span class="sourceLineNo">965</span>      LOG.debug("About to register with Master.");<a name="line.965"></a>
+<span class="sourceLineNo">966</span>      RetryCounterFactory rcf = new RetryCounterFactory(Integer.MAX_VALUE,<a name="line.966"></a>
+<span class="sourceLineNo">967</span>          this.sleeper.getPeriod(), 1000 * 60 * 5);<a name="line.967"></a>
+<span class="sourceLineNo">968</span>      RetryCounter rc = rcf.create();<a name="line.968"></a>
+<span class="sourceLineNo">969</span>      while (keepLooping()) {<a name="line.969"></a>
+<span class="sourceLineNo">970</span>        RegionServerStartupResponse w = reportForDuty();<a name="line.970"></a>
+<span class="sourceLineNo">971</span>        if (w == null) {<a name="line.971"></a>
+<span class="sourceLineNo">972</span>          long sleepTime = rc.getBackoffTimeAndIncrementAttempts();<a name="line.972"></a>
+<span class="sourceLineNo">973</span>          LOG.warn("reportForDuty failed; sleeping {} ms and then retrying.", sleepTime);<a name="line.973"></a>
+<span class="sourceLineNo">974</span>          this.sleeper.sleep(sleepTime);<a name="line.974"></a>
+<span class="sourceLineNo">975</span>        } else {<a name="line.975"></a>
+<span class="sourceLineNo">976</span>          handleReportForDutyResponse(w);<a name="line.976"></a>
+<span class="sourceLineNo">977</span>          break;<a name="line.977"></a>
+<span class="sourceLineNo">978</span>        }<a name="line.978"></a>
+<span class="sourceLineNo">979</span>      }<a name="line.979"></a>
+<span class="sourceLineNo">980</span><a name="line.980"></a>
+<span class="sourceLineNo">981</span>      if (!isStopped() &amp;&amp; isHealthy()) {<a name="line.981"></a>
+<span class="sourceLineNo">982</span>        // start the snapshot handler and other procedure handlers,<a name="line.982"></a>
+<span class="sourceLineNo">983</span>        // since the server is ready to run<a name="line.983"></a>
+<span class="sourceLineNo">984</span>        if (this.rspmHost != null) {<a name="line.984"></a>
+<span class="sourceLineNo">985</span>          this.rspmHost.start();<a name="line.985"></a>
+<span class="sourceLineNo">986</span>        }<a name="line.986"></a>
+<span class="sourceLineNo">987</span>        // Start the Quota Manager<a name="line.987"></a>
+<span class="sourceLineNo">988</span>        if (this.rsQuotaManager != null) {<a name="line.988"></a>
+<span class="sourceLineNo">989</span>          rsQuotaManager.start(getRpcServer().getScheduler());<a name="line.989"></a>
+<span class="sourceLineNo">990</span>        }<a name="line.990"></a>
+<span class="sourceLineNo">991</span>        if (this.rsSpaceQuotaManager != null) {<a name="line.991"></a>
+<span class="sourceLineNo">992</span>          this.rsSpaceQuotaManager.start();<a name="line.992"></a>
+<span class="sourceLineNo">993</span>        }<a name="line.993"></a>
+<span class="sourceLineNo">994</span>      }<a name="line.994"></a>
+<span class="sourceLineNo">995</span><a name="line.995"></a>
+<span class="sourceLineNo">996</span>      // We registered with the Master.  Go into run mode.<a name="line.996"></a>
+<span class="sourceLineNo">997</span>      long lastMsg = System.currentTimeMillis();<a name="line.997"></a>
+<span class="sourceLineNo">998</span>      long oldRequestCount = -1;<a name="line.998"></a>
+<span class="sourceLineNo">999</span>      // The main run loop.<a name="line.999"></a>
+<span class="sourceLineNo">1000</span>      while (!isStopped() &amp;&amp; isHealthy()) {<a name="line.1000"></a>
+<span class="sourceLineNo">1001</span>        if (!isClusterUp()) {<a name="line.1001"></a>
+<span class="sourceLineNo">1002</span>          if (isOnlineRegionsEmpty()) {<a name="line.1002"></a>
+<span class="sourceLineNo">1003</span>            stop("Exiting; cluster shutdown set and not carrying any regions");<a name="line.1003"></a>
+<span class="sourceLineNo">1004</span>          } else if (!this.stopping) {<a name="line.1004"></a>
+<span class="sourceLineNo">1005</span>            this.stopping = true;<a name="line.1005"></a>
+<span class="sourceLineNo">1006</span>            LOG.info("Closing user regions");<a name="line.1006"></a>
+<span class="sourceLineNo">1007</span>            closeUserRegions(this.abortRequested);<a name="line.1007"></a>
+<span class="sourceLineNo">1008</span>          } else if (this.stopping) {<a name="line.1008"></a>
+<span class="sourceLineNo">1009</span>            boolean allUserRegionsOffline = areAllUserRegionsOffline();<a name="line.1009"></a>
+<span class="sourceLineNo">1010</span>            if (allUserRegionsOffline) {<a name="line.1010"></a>
+<span class="sourceLineNo">1011</span>              // Set stopped if no more write requests tp meta tables<a name="line.1011"></a>
+<span class="sourceLineNo">1012</span>              // since last time we went around the loop.  Any open<a name="line.1012"></a>
+<span class="sourceLineNo">1013</span>              // meta regions will be closed on our way out.<a name="line.1013"></a>
+<span class="sourceLineNo">1014</span>              if (oldRequestCount == getWriteRequestCount()) {<a name="line.1014"></a>
+<span class="sourceLineNo">1015</span>                stop("Stopped; only catalog regions remaining online");<a name="line.1015"></a>
+<span class="sourceLineNo">1016</span>                break;<a name="line.1016"></a>
+<span class="sourceLineNo">1017</span>              }<a name="line.1017"></a>
+<span class="sourceLineNo">1018</span>              oldRequestCount = getWriteRequestCount();<a name="line.1018"></a>
+<span class="sourceLineNo">1019</span>            } else {<a name="line.1019"></a>
+<span class="sourceLineNo">1020</span>              // Make sure all regions have been closed -- some regions may<a name="line.1020"></a>
+<span class="sourceLineNo">1021</span>              // have not got it because we were splitting at the time of<a name="line.1021"></a>
+<span class="sourceLineNo">1022</span>              // the call to closeUserRegions.<a name="line.1022"></a>
+<span class="sourceLineNo">1023</span>              closeUserRegions(this.abortRequested);<a name="line.1023"></a>
+<span class="sourceLineNo">1024</span>            }<a name="line.1024"></a>
+<span class="sourceLineNo">1025</span>            LOG.debug("Waiting on " + getOnlineRegionsAsPrintableString());<a name="line.1025"></a>
+<span class="sourceLineNo">1026</span>          }<a name="line.1026"></a>
 <span class="sourceLineNo">1027</span>        }<a name="line.1027"></a>
-<span class="sourceLineNo">1028</span>        if (!isStopped() &amp;&amp; !isAborted()) {<a name="line.1028"></a>
-<span class="sourceLineNo">1029</span>          this.sleeper.sleep();<a name="line.1029"></a>
-<span class="sourceLineNo">1030</span>        }<a name="line.1030"></a>
-<span class="sourceLineNo">1031</span>      } // for<a name="line.1031"></a>
-<span class="sourceLineNo">1032</span>    } catch (Throwable t) {<a name="line.1032"></a>
-<span class="sourceLineNo">1033</span>      if (!rpcServices.checkOOME(t)) {<a name="line.1033"></a>
-<span class="sourceLineNo">1034</span>        String prefix = t instanceof YouAreDeadException? "": "Unhandled: ";<a name="line.1034"></a>
-<span class="sourceLineNo">1035</span>        abort(prefix + t.getMessage(), t);<a name="line.1035"></a>
-<span class="sourceLineNo">1036</span>      }<a name="line.1036"></a>
-<span class="sourceLineNo">1037</span>    }<a name="line.1037"></a>
-<span class="sourceLineNo">1038</span><a name="line.1038"></a>
-<span class="sourceLineNo">1039</span>    if (abortRequested) {<a name="line.1039"></a>
-<span class="sourceLineNo">1040</span>      Timer abortMonitor = new Timer("Abort regionserver monitor", true);<a name="line.1040"></a>
-<span class="sourceLineNo">1041</span>      TimerTask abortTimeoutTask = null;<a name="line.1041"></a>
-<span class="sourceLineNo">1042</span>      try {<a name="line.1042"></a>
-<span class="sourceLineNo">1043</span>        abortTimeoutTask =<a name="line.1043"></a>
-<span class="sourceLineNo">1044</span>            Class.forName(conf.get(ABORT_TIMEOUT_TASK, SystemExitWhenAbortTimeout.class.getName()))<a name="line.1044"></a>
-<span class="sourceLineNo">1045</span>                .asSubclass(TimerTask.class).getDeclaredConstructor().newInstance();<a name="line.1045"></a>
-<span class="sourceLineNo">1046</span>      } catch (Exception e) {<a name="line.1046"></a>
-<span class="sourceLineNo">1047</span>        LOG.warn("Initialize abort timeout task failed", e);<a name="line.1047"></a>
-<span class="sourceLineNo">1048</span>      }<a name="line.1048"></a>
-<span class="sourceLineNo">1049</span>      if (abortTi

<TRUNCATED>
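
The registration loop in HRegionServer.run() above keeps calling reportForDuty() until the Master responds, sleeping between attempts with a backoff that the RetryCounterFactory arguments cap at five minutes. Below is a minimal, self-contained sketch of that retry-with-backoff shape; the names (the reportForDuty stub, the sleep constants) are illustrative stand-ins, not the real HBase classes.

    // Illustrative sketch of the retry-with-backoff loop used for Master registration.
    // All names here are hypothetical stand-ins, not org.apache.hadoop.hbase classes.
    public final class BackoffRetryLoop {

      // Hypothetical stand-in for reportForDuty(): returns null until the peer answers.
      private static String reportForDuty(int attempt) {
        return attempt < 3 ? null : "startup-response";
      }

      public static void main(String[] args) throws InterruptedException {
        final long baseSleepMs = 3_000L;          // analogous to sleeper.getPeriod()
        final long maxSleepMs = 5L * 60 * 1_000;  // cap, mirroring the 1000 * 60 * 5 argument above
        int attempt = 0;
        String response = null;
        while (response == null) {
          response = reportForDuty(attempt);
          if (response == null) {
            // Exponential backoff, capped at maxSleepMs.
            long sleepMs = Math.min(maxSleepMs, baseSleepMs << Math.min(attempt, 6));
            System.out.println("reportForDuty failed; sleeping " + sleepMs + " ms and then retrying.");
            Thread.sleep(sleepMs);
            attempt++;
          }
        }
        System.out.println("Registered with startup response: " + response);
      }
    }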

[19/26] hbase-site git commit: Published site at 6f15cecaed2f1f76bfe1880b7c578ed369daa5d5.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html
index bf5af92..f32b223 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html
@@ -223,7 +223,7 @@
 <span class="sourceLineNo">215</span>   * @param family column family configuration<a name="line.215"></a>
 <span class="sourceLineNo">216</span>   */<a name="line.216"></a>
 <span class="sourceLineNo">217</span>  public CacheConfig(Configuration conf, ColumnFamilyDescriptor family) {<a name="line.217"></a>
-<span class="sourceLineNo">218</span>    this(CacheConfig.instantiateBlockCache(conf),<a name="line.218"></a>
+<span class="sourceLineNo">218</span>    this(GLOBAL_BLOCK_CACHE_INSTANCE,<a name="line.218"></a>
 <span class="sourceLineNo">219</span>        conf.getBoolean(CACHE_DATA_ON_READ_KEY, DEFAULT_CACHE_DATA_ON_READ)<a name="line.219"></a>
 <span class="sourceLineNo">220</span>           &amp;&amp; family.isBlockCacheEnabled(),<a name="line.220"></a>
 <span class="sourceLineNo">221</span>        family.isInMemory(),<a name="line.221"></a>
@@ -253,463 +253,443 @@
 <span class="sourceLineNo">245</span>   * @param conf hbase configuration<a name="line.245"></a>
 <span class="sourceLineNo">246</span>   */<a name="line.246"></a>
 <span class="sourceLineNo">247</span>  public CacheConfig(Configuration conf) {<a name="line.247"></a>
-<span class="sourceLineNo">248</span>    this(conf, true);<a name="line.248"></a>
-<span class="sourceLineNo">249</span>  }<a name="line.249"></a>
-<span class="sourceLineNo">250</span><a name="line.250"></a>
-<span class="sourceLineNo">251</span>  public CacheConfig(Configuration conf, boolean enableBlockCache) {<a name="line.251"></a>
-<span class="sourceLineNo">252</span>    this(conf, enableBlockCache,<a name="line.252"></a>
-<span class="sourceLineNo">253</span>        conf.getBoolean(CACHE_DATA_ON_READ_KEY, DEFAULT_CACHE_DATA_ON_READ),<a name="line.253"></a>
-<span class="sourceLineNo">254</span>        DEFAULT_IN_MEMORY, // This is a family-level setting so can't be set<a name="line.254"></a>
-<span class="sourceLineNo">255</span>                           // strictly from conf<a name="line.255"></a>
-<span class="sourceLineNo">256</span>        conf.getBoolean(CACHE_BLOCKS_ON_WRITE_KEY, DEFAULT_CACHE_DATA_ON_WRITE),<a name="line.256"></a>
-<span class="sourceLineNo">257</span>        conf.getBoolean(CACHE_INDEX_BLOCKS_ON_WRITE_KEY, DEFAULT_CACHE_INDEXES_ON_WRITE),<a name="line.257"></a>
-<span class="sourceLineNo">258</span>        conf.getBoolean(CACHE_BLOOM_BLOCKS_ON_WRITE_KEY, DEFAULT_CACHE_BLOOMS_ON_WRITE),<a name="line.258"></a>
-<span class="sourceLineNo">259</span>        conf.getBoolean(EVICT_BLOCKS_ON_CLOSE_KEY, DEFAULT_EVICT_ON_CLOSE),<a name="line.259"></a>
-<span class="sourceLineNo">260</span>        conf.getBoolean(CACHE_DATA_BLOCKS_COMPRESSED_KEY, DEFAULT_CACHE_DATA_COMPRESSED),<a name="line.260"></a>
-<span class="sourceLineNo">261</span>        conf.getBoolean(PREFETCH_BLOCKS_ON_OPEN_KEY, DEFAULT_PREFETCH_ON_OPEN),<a name="line.261"></a>
-<span class="sourceLineNo">262</span>        conf.getBoolean(DROP_BEHIND_CACHE_COMPACTION_KEY, DROP_BEHIND_CACHE_COMPACTION_DEFAULT));<a name="line.262"></a>
-<span class="sourceLineNo">263</span>    LOG.info("Created cacheConfig: " + this);<a name="line.263"></a>
-<span class="sourceLineNo">264</span>  }<a name="line.264"></a>
-<span class="sourceLineNo">265</span><a name="line.265"></a>
-<span class="sourceLineNo">266</span>  private CacheConfig(Configuration conf, boolean enableBlockCache,<a name="line.266"></a>
-<span class="sourceLineNo">267</span>      final boolean cacheDataOnRead, final boolean inMemory,<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      final boolean cacheDataOnWrite, final boolean cacheIndexesOnWrite,<a name="line.268"></a>
-<span class="sourceLineNo">269</span>      final boolean cacheBloomsOnWrite, final boolean evictOnClose,<a name="line.269"></a>
-<span class="sourceLineNo">270</span>      final boolean cacheDataCompressed, final boolean prefetchOnOpen,<a name="line.270"></a>
-<span class="sourceLineNo">271</span>      final boolean dropBehindCompaction) {<a name="line.271"></a>
-<span class="sourceLineNo">272</span>    if (enableBlockCache) {<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      this.blockCache = CacheConfig.instantiateBlockCache(conf);<a name="line.273"></a>
-<span class="sourceLineNo">274</span>    } else {<a name="line.274"></a>
-<span class="sourceLineNo">275</span>      this.blockCache = null;<a name="line.275"></a>
-<span class="sourceLineNo">276</span>    }<a name="line.276"></a>
-<span class="sourceLineNo">277</span>    this.cacheDataOnRead = cacheDataOnRead;<a name="line.277"></a>
-<span class="sourceLineNo">278</span>    this.inMemory = inMemory;<a name="line.278"></a>
-<span class="sourceLineNo">279</span>    this.cacheDataOnWrite = cacheDataOnWrite;<a name="line.279"></a>
-<span class="sourceLineNo">280</span>    this.cacheIndexesOnWrite = cacheIndexesOnWrite;<a name="line.280"></a>
-<span class="sourceLineNo">281</span>    this.cacheBloomsOnWrite = cacheBloomsOnWrite;<a name="line.281"></a>
-<span class="sourceLineNo">282</span>    this.evictOnClose = evictOnClose;<a name="line.282"></a>
-<span class="sourceLineNo">283</span>    this.cacheDataCompressed = cacheDataCompressed;<a name="line.283"></a>
-<span class="sourceLineNo">284</span>    this.prefetchOnOpen = prefetchOnOpen;<a name="line.284"></a>
-<span class="sourceLineNo">285</span>    this.dropBehindCompaction = dropBehindCompaction;<a name="line.285"></a>
-<span class="sourceLineNo">286</span>  }<a name="line.286"></a>
-<span class="sourceLineNo">287</span><a name="line.287"></a>
-<span class="sourceLineNo">288</span>  /**<a name="line.288"></a>
-<span class="sourceLineNo">289</span>   * Create a block cache configuration with the specified cache and configuration parameters.<a name="line.289"></a>
-<span class="sourceLineNo">290</span>   * @param blockCache reference to block cache, null if completely disabled<a name="line.290"></a>
-<span class="sourceLineNo">291</span>   * @param cacheDataOnRead whether DATA blocks should be cached on read (we always cache INDEX<a name="line.291"></a>
-<span class="sourceLineNo">292</span>   *          blocks and BLOOM blocks; this cannot be disabled).<a name="line.292"></a>
-<span class="sourceLineNo">293</span>   * @param inMemory whether blocks should be flagged as in-memory<a name="line.293"></a>
-<span class="sourceLineNo">294</span>   * @param cacheDataOnWrite whether data blocks should be cached on write<a name="line.294"></a>
-<span class="sourceLineNo">295</span>   * @param cacheIndexesOnWrite whether index blocks should be cached on write<a name="line.295"></a>
-<span class="sourceLineNo">296</span>   * @param cacheBloomsOnWrite whether blooms should be cached on write<a name="line.296"></a>
-<span class="sourceLineNo">297</span>   * @param evictOnClose whether blocks should be evicted when HFile is closed<a name="line.297"></a>
-<span class="sourceLineNo">298</span>   * @param cacheDataCompressed whether to store blocks as compressed in the cache<a name="line.298"></a>
-<span class="sourceLineNo">299</span>   * @param prefetchOnOpen whether to prefetch blocks upon open<a name="line.299"></a>
-<span class="sourceLineNo">300</span>   * @param dropBehindCompaction indicate that we should set drop behind to true when open a store<a name="line.300"></a>
-<span class="sourceLineNo">301</span>   *          file reader for compaction<a name="line.301"></a>
-<span class="sourceLineNo">302</span>   */<a name="line.302"></a>
-<span class="sourceLineNo">303</span>  @VisibleForTesting<a name="line.303"></a>
-<span class="sourceLineNo">304</span>  CacheConfig(final BlockCache blockCache,<a name="line.304"></a>
-<span class="sourceLineNo">305</span>      final boolean cacheDataOnRead, final boolean inMemory,<a name="line.305"></a>
-<span class="sourceLineNo">306</span>      final boolean cacheDataOnWrite, final boolean cacheIndexesOnWrite,<a name="line.306"></a>
-<span class="sourceLineNo">307</span>      final boolean cacheBloomsOnWrite, final boolean evictOnClose,<a name="line.307"></a>
-<span class="sourceLineNo">308</span>      final boolean cacheDataCompressed, final boolean prefetchOnOpen,<a name="line.308"></a>
-<span class="sourceLineNo">309</span>      final boolean dropBehindCompaction) {<a name="line.309"></a>
-<span class="sourceLineNo">310</span>    this.blockCache = blockCache;<a name="line.310"></a>
-<span class="sourceLineNo">311</span>    this.cacheDataOnRead = cacheDataOnRead;<a name="line.311"></a>
-<span class="sourceLineNo">312</span>    this.inMemory = inMemory;<a name="line.312"></a>
-<span class="sourceLineNo">313</span>    this.cacheDataOnWrite = cacheDataOnWrite;<a name="line.313"></a>
-<span class="sourceLineNo">314</span>    this.cacheIndexesOnWrite = cacheIndexesOnWrite;<a name="line.314"></a>
-<span class="sourceLineNo">315</span>    this.cacheBloomsOnWrite = cacheBloomsOnWrite;<a name="line.315"></a>
-<span class="sourceLineNo">316</span>    this.evictOnClose = evictOnClose;<a name="line.316"></a>
-<span class="sourceLineNo">317</span>    this.cacheDataCompressed = cacheDataCompressed;<a name="line.317"></a>
-<span class="sourceLineNo">318</span>    this.prefetchOnOpen = prefetchOnOpen;<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    this.dropBehindCompaction = dropBehindCompaction;<a name="line.319"></a>
-<span class="sourceLineNo">320</span>  }<a name="line.320"></a>
-<span class="sourceLineNo">321</span><a name="line.321"></a>
-<span class="sourceLineNo">322</span>  /**<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   * Constructs a cache configuration copied from the specified configuration.<a name="line.323"></a>
-<span class="sourceLineNo">324</span>   * @param cacheConf<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   */<a name="line.325"></a>
-<span class="sourceLineNo">326</span>  public CacheConfig(CacheConfig cacheConf) {<a name="line.326"></a>
-<span class="sourceLineNo">327</span>    this(cacheConf.blockCache, cacheConf.cacheDataOnRead, cacheConf.inMemory,<a name="line.327"></a>
-<span class="sourceLineNo">328</span>        cacheConf.cacheDataOnWrite, cacheConf.cacheIndexesOnWrite,<a name="line.328"></a>
-<span class="sourceLineNo">329</span>        cacheConf.cacheBloomsOnWrite, cacheConf.evictOnClose,<a name="line.329"></a>
-<span class="sourceLineNo">330</span>        cacheConf.cacheDataCompressed, cacheConf.prefetchOnOpen,<a name="line.330"></a>
-<span class="sourceLineNo">331</span>        cacheConf.dropBehindCompaction);<a name="line.331"></a>
-<span class="sourceLineNo">332</span>  }<a name="line.332"></a>
-<span class="sourceLineNo">333</span><a name="line.333"></a>
-<span class="sourceLineNo">334</span>  private CacheConfig() {<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    this(null, false, false, false, false, false, false, false, false, false);<a name="line.335"></a>
-<span class="sourceLineNo">336</span>  }<a name="line.336"></a>
-<span class="sourceLineNo">337</span><a name="line.337"></a>
-<span class="sourceLineNo">338</span>  /**<a name="line.338"></a>
-<span class="sourceLineNo">339</span>   * Checks whether the block cache is enabled.<a name="line.339"></a>
-<span class="sourceLineNo">340</span>   */<a name="line.340"></a>
-<span class="sourceLineNo">341</span>  public boolean isBlockCacheEnabled() {<a name="line.341"></a>
-<span class="sourceLineNo">342</span>    return this.blockCache != null;<a name="line.342"></a>
-<span class="sourceLineNo">343</span>  }<a name="line.343"></a>
-<span class="sourceLineNo">344</span><a name="line.344"></a>
-<span class="sourceLineNo">345</span>  /**<a name="line.345"></a>
-<span class="sourceLineNo">346</span>   * Returns the block cache.<a name="line.346"></a>
-<span class="sourceLineNo">347</span>   * @return the block cache, or null if caching is completely disabled<a name="line.347"></a>
-<span class="sourceLineNo">348</span>   */<a name="line.348"></a>
-<span class="sourceLineNo">349</span>  public BlockCache getBlockCache() {<a name="line.349"></a>
-<span class="sourceLineNo">350</span>    return this.blockCache;<a name="line.350"></a>
-<span class="sourceLineNo">351</span>  }<a name="line.351"></a>
-<span class="sourceLineNo">352</span><a name="line.352"></a>
-<span class="sourceLineNo">353</span>  /**<a name="line.353"></a>
-<span class="sourceLineNo">354</span>   * Returns whether the DATA blocks of this HFile should be cached on read or not (we always<a name="line.354"></a>
-<span class="sourceLineNo">355</span>   * cache the meta blocks, the INDEX and BLOOM blocks).<a name="line.355"></a>
-<span class="sourceLineNo">356</span>   * @return true if blocks should be cached on read, false if not<a name="line.356"></a>
+<span class="sourceLineNo">248</span>    this(GLOBAL_BLOCK_CACHE_INSTANCE,<a name="line.248"></a>
+<span class="sourceLineNo">249</span>        conf.getBoolean(CACHE_DATA_ON_READ_KEY, DEFAULT_CACHE_DATA_ON_READ),<a name="line.249"></a>
+<span class="sourceLineNo">250</span>        DEFAULT_IN_MEMORY, // This is a family-level setting so can't be set<a name="line.250"></a>
+<span class="sourceLineNo">251</span>        // strictly from conf<a name="line.251"></a>
+<span class="sourceLineNo">252</span>        conf.getBoolean(CACHE_BLOCKS_ON_WRITE_KEY, DEFAULT_CACHE_DATA_ON_WRITE),<a name="line.252"></a>
+<span class="sourceLineNo">253</span>        conf.getBoolean(CACHE_INDEX_BLOCKS_ON_WRITE_KEY, DEFAULT_CACHE_INDEXES_ON_WRITE),<a name="line.253"></a>
+<span class="sourceLineNo">254</span>        conf.getBoolean(CACHE_BLOOM_BLOCKS_ON_WRITE_KEY, DEFAULT_CACHE_BLOOMS_ON_WRITE),<a name="line.254"></a>
+<span class="sourceLineNo">255</span>        conf.getBoolean(EVICT_BLOCKS_ON_CLOSE_KEY, DEFAULT_EVICT_ON_CLOSE),<a name="line.255"></a>
+<span class="sourceLineNo">256</span>        conf.getBoolean(CACHE_DATA_BLOCKS_COMPRESSED_KEY, DEFAULT_CACHE_DATA_COMPRESSED),<a name="line.256"></a>
+<span class="sourceLineNo">257</span>        conf.getBoolean(PREFETCH_BLOCKS_ON_OPEN_KEY, DEFAULT_PREFETCH_ON_OPEN),<a name="line.257"></a>
+<span class="sourceLineNo">258</span>        conf.getBoolean(DROP_BEHIND_CACHE_COMPACTION_KEY, DROP_BEHIND_CACHE_COMPACTION_DEFAULT));<a name="line.258"></a>
+<span class="sourceLineNo">259</span>    LOG.info("Created cacheConfig: " + this);<a name="line.259"></a>
+<span class="sourceLineNo">260</span>  }<a name="line.260"></a>
+<span class="sourceLineNo">261</span><a name="line.261"></a>
+<span class="sourceLineNo">262</span>  /**<a name="line.262"></a>
+<span class="sourceLineNo">263</span>   * Create a block cache configuration with the specified cache and configuration parameters.<a name="line.263"></a>
+<span class="sourceLineNo">264</span>   * @param blockCache reference to block cache, null if completely disabled<a name="line.264"></a>
+<span class="sourceLineNo">265</span>   * @param cacheDataOnRead whether DATA blocks should be cached on read (we always cache INDEX<a name="line.265"></a>
+<span class="sourceLineNo">266</span>   *          blocks and BLOOM blocks; this cannot be disabled).<a name="line.266"></a>
+<span class="sourceLineNo">267</span>   * @param inMemory whether blocks should be flagged as in-memory<a name="line.267"></a>
+<span class="sourceLineNo">268</span>   * @param cacheDataOnWrite whether data blocks should be cached on write<a name="line.268"></a>
+<span class="sourceLineNo">269</span>   * @param cacheIndexesOnWrite whether index blocks should be cached on write<a name="line.269"></a>
+<span class="sourceLineNo">270</span>   * @param cacheBloomsOnWrite whether blooms should be cached on write<a name="line.270"></a>
+<span class="sourceLineNo">271</span>   * @param evictOnClose whether blocks should be evicted when HFile is closed<a name="line.271"></a>
+<span class="sourceLineNo">272</span>   * @param cacheDataCompressed whether to store blocks as compressed in the cache<a name="line.272"></a>
+<span class="sourceLineNo">273</span>   * @param prefetchOnOpen whether to prefetch blocks upon open<a name="line.273"></a>
+<span class="sourceLineNo">274</span>   * @param dropBehindCompaction indicate that we should set drop behind to true when open a store<a name="line.274"></a>
+<span class="sourceLineNo">275</span>   *          file reader for compaction<a name="line.275"></a>
+<span class="sourceLineNo">276</span>   */<a name="line.276"></a>
+<span class="sourceLineNo">277</span>  @VisibleForTesting<a name="line.277"></a>
+<span class="sourceLineNo">278</span>  CacheConfig(final BlockCache blockCache,<a name="line.278"></a>
+<span class="sourceLineNo">279</span>      final boolean cacheDataOnRead, final boolean inMemory,<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      final boolean cacheDataOnWrite, final boolean cacheIndexesOnWrite,<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      final boolean cacheBloomsOnWrite, final boolean evictOnClose,<a name="line.281"></a>
+<span class="sourceLineNo">282</span>      final boolean cacheDataCompressed, final boolean prefetchOnOpen,<a name="line.282"></a>
+<span class="sourceLineNo">283</span>      final boolean dropBehindCompaction) {<a name="line.283"></a>
+<span class="sourceLineNo">284</span>    this.blockCache = blockCache;<a name="line.284"></a>
+<span class="sourceLineNo">285</span>    this.cacheDataOnRead = cacheDataOnRead;<a name="line.285"></a>
+<span class="sourceLineNo">286</span>    this.inMemory = inMemory;<a name="line.286"></a>
+<span class="sourceLineNo">287</span>    this.cacheDataOnWrite = cacheDataOnWrite;<a name="line.287"></a>
+<span class="sourceLineNo">288</span>    this.cacheIndexesOnWrite = cacheIndexesOnWrite;<a name="line.288"></a>
+<span class="sourceLineNo">289</span>    this.cacheBloomsOnWrite = cacheBloomsOnWrite;<a name="line.289"></a>
+<span class="sourceLineNo">290</span>    this.evictOnClose = evictOnClose;<a name="line.290"></a>
+<span class="sourceLineNo">291</span>    this.cacheDataCompressed = cacheDataCompressed;<a name="line.291"></a>
+<span class="sourceLineNo">292</span>    this.prefetchOnOpen = prefetchOnOpen;<a name="line.292"></a>
+<span class="sourceLineNo">293</span>    this.dropBehindCompaction = dropBehindCompaction;<a name="line.293"></a>
+<span class="sourceLineNo">294</span>  }<a name="line.294"></a>
+<span class="sourceLineNo">295</span><a name="line.295"></a>
+<span class="sourceLineNo">296</span>  /**<a name="line.296"></a>
+<span class="sourceLineNo">297</span>   * Constructs a cache configuration copied from the specified configuration.<a name="line.297"></a>
+<span class="sourceLineNo">298</span>   * @param cacheConf<a name="line.298"></a>
+<span class="sourceLineNo">299</span>   */<a name="line.299"></a>
+<span class="sourceLineNo">300</span>  public CacheConfig(CacheConfig cacheConf) {<a name="line.300"></a>
+<span class="sourceLineNo">301</span>    this(cacheConf.blockCache, cacheConf.cacheDataOnRead, cacheConf.inMemory,<a name="line.301"></a>
+<span class="sourceLineNo">302</span>        cacheConf.cacheDataOnWrite, cacheConf.cacheIndexesOnWrite,<a name="line.302"></a>
+<span class="sourceLineNo">303</span>        cacheConf.cacheBloomsOnWrite, cacheConf.evictOnClose,<a name="line.303"></a>
+<span class="sourceLineNo">304</span>        cacheConf.cacheDataCompressed, cacheConf.prefetchOnOpen,<a name="line.304"></a>
+<span class="sourceLineNo">305</span>        cacheConf.dropBehindCompaction);<a name="line.305"></a>
+<span class="sourceLineNo">306</span>  }<a name="line.306"></a>
+<span class="sourceLineNo">307</span><a name="line.307"></a>
+<span class="sourceLineNo">308</span>  private CacheConfig() {<a name="line.308"></a>
+<span class="sourceLineNo">309</span>    this(null, false, false, false, false, false, false, false, false, false);<a name="line.309"></a>
+<span class="sourceLineNo">310</span>  }<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>  /**<a name="line.312"></a>
+<span class="sourceLineNo">313</span>   * Checks whether the block cache is enabled.<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   */<a name="line.314"></a>
+<span class="sourceLineNo">315</span>  public boolean isBlockCacheEnabled() {<a name="line.315"></a>
+<span class="sourceLineNo">316</span>    return this.blockCache != null;<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  }<a name="line.317"></a>
+<span class="sourceLineNo">318</span><a name="line.318"></a>
+<span class="sourceLineNo">319</span>  /**<a name="line.319"></a>
+<span class="sourceLineNo">320</span>   * Returns the block cache.<a name="line.320"></a>
+<span class="sourceLineNo">321</span>   * @return the block cache, or null if caching is completely disabled<a name="line.321"></a>
+<span class="sourceLineNo">322</span>   */<a name="line.322"></a>
+<span class="sourceLineNo">323</span>  public BlockCache getBlockCache() {<a name="line.323"></a>
+<span class="sourceLineNo">324</span>    return this.blockCache;<a name="line.324"></a>
+<span class="sourceLineNo">325</span>  }<a name="line.325"></a>
+<span class="sourceLineNo">326</span><a name="line.326"></a>
+<span class="sourceLineNo">327</span>  /**<a name="line.327"></a>
+<span class="sourceLineNo">328</span>   * Returns whether the DATA blocks of this HFile should be cached on read or not (we always<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * cache the meta blocks, the INDEX and BLOOM blocks).<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   * @return true if blocks should be cached on read, false if not<a name="line.330"></a>
+<span class="sourceLineNo">331</span>   */<a name="line.331"></a>
+<span class="sourceLineNo">332</span>  public boolean shouldCacheDataOnRead() {<a name="line.332"></a>
+<span class="sourceLineNo">333</span>    return isBlockCacheEnabled() &amp;&amp; cacheDataOnRead;<a name="line.333"></a>
+<span class="sourceLineNo">334</span>  }<a name="line.334"></a>
+<span class="sourceLineNo">335</span><a name="line.335"></a>
+<span class="sourceLineNo">336</span>  public boolean shouldDropBehindCompaction() {<a name="line.336"></a>
+<span class="sourceLineNo">337</span>    return dropBehindCompaction;<a name="line.337"></a>
+<span class="sourceLineNo">338</span>  }<a name="line.338"></a>
+<span class="sourceLineNo">339</span><a name="line.339"></a>
+<span class="sourceLineNo">340</span>  /**<a name="line.340"></a>
+<span class="sourceLineNo">341</span>   * Should we cache a block of a particular category? We always cache<a name="line.341"></a>
+<span class="sourceLineNo">342</span>   * important blocks such as index blocks, as long as the block cache is<a name="line.342"></a>
+<span class="sourceLineNo">343</span>   * available.<a name="line.343"></a>
+<span class="sourceLineNo">344</span>   */<a name="line.344"></a>
+<span class="sourceLineNo">345</span>  public boolean shouldCacheBlockOnRead(BlockCategory category) {<a name="line.345"></a>
+<span class="sourceLineNo">346</span>    return isBlockCacheEnabled()<a name="line.346"></a>
+<span class="sourceLineNo">347</span>        &amp;&amp; (cacheDataOnRead ||<a name="line.347"></a>
+<span class="sourceLineNo">348</span>            category == BlockCategory.INDEX ||<a name="line.348"></a>
+<span class="sourceLineNo">349</span>            category == BlockCategory.BLOOM ||<a name="line.349"></a>
+<span class="sourceLineNo">350</span>            (prefetchOnOpen &amp;&amp;<a name="line.350"></a>
+<span class="sourceLineNo">351</span>                (category != BlockCategory.META &amp;&amp;<a name="line.351"></a>
+<span class="sourceLineNo">352</span>                 category != BlockCategory.UNKNOWN)));<a name="line.352"></a>
+<span class="sourceLineNo">353</span>  }<a name="line.353"></a>
+<span class="sourceLineNo">354</span><a name="line.354"></a>
+<span class="sourceLineNo">355</span>  /**<a name="line.355"></a>
+<span class="sourceLineNo">356</span>   * @return true if blocks in this file should be flagged as in-memory<a name="line.356"></a>
 <span class="sourceLineNo">357</span>   */<a name="line.357"></a>
-<span class="sourceLineNo">358</span>  public boolean shouldCacheDataOnRead() {<a name="line.358"></a>
-<span class="sourceLineNo">359</span>    return isBlockCacheEnabled() &amp;&amp; cacheDataOnRead;<a name="line.359"></a>
+<span class="sourceLineNo">358</span>  public boolean isInMemory() {<a name="line.358"></a>
+<span class="sourceLineNo">359</span>    return isBlockCacheEnabled() &amp;&amp; this.inMemory;<a name="line.359"></a>
 <span class="sourceLineNo">360</span>  }<a name="line.360"></a>
 <span class="sourceLineNo">361</span><a name="line.361"></a>
-<span class="sourceLineNo">362</span>  public boolean shouldDropBehindCompaction() {<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    return dropBehindCompaction;<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  }<a name="line.364"></a>
-<span class="sourceLineNo">365</span><a name="line.365"></a>
-<span class="sourceLineNo">366</span>  /**<a name="line.366"></a>
-<span class="sourceLineNo">367</span>   * Should we cache a block of a particular category? We always cache<a name="line.367"></a>
-<span class="sourceLineNo">368</span>   * important blocks such as index blocks, as long as the block cache is<a name="line.368"></a>
-<span class="sourceLineNo">369</span>   * available.<a name="line.369"></a>
-<span class="sourceLineNo">370</span>   */<a name="line.370"></a>
-<span class="sourceLineNo">371</span>  public boolean shouldCacheBlockOnRead(BlockCategory category) {<a name="line.371"></a>
-<span class="sourceLineNo">372</span>    return isBlockCacheEnabled()<a name="line.372"></a>
-<span class="sourceLineNo">373</span>        &amp;&amp; (cacheDataOnRead ||<a name="line.373"></a>
-<span class="sourceLineNo">374</span>            category == BlockCategory.INDEX ||<a name="line.374"></a>
-<span class="sourceLineNo">375</span>            category == BlockCategory.BLOOM ||<a name="line.375"></a>
-<span class="sourceLineNo">376</span>            (prefetchOnOpen &amp;&amp;<a name="line.376"></a>
-<span class="sourceLineNo">377</span>                (category != BlockCategory.META &amp;&amp;<a name="line.377"></a>
-<span class="sourceLineNo">378</span>                 category != BlockCategory.UNKNOWN)));<a name="line.378"></a>
-<span class="sourceLineNo">379</span>  }<a name="line.379"></a>
-<span class="sourceLineNo">380</span><a name="line.380"></a>
-<span class="sourceLineNo">381</span>  /**<a name="line.381"></a>
-<span class="sourceLineNo">382</span>   * @return true if blocks in this file should be flagged as in-memory<a name="line.382"></a>
+<span class="sourceLineNo">362</span>  /**<a name="line.362"></a>
+<span class="sourceLineNo">363</span>   * @return true if data blocks should be written to the cache when an HFile is<a name="line.363"></a>
+<span class="sourceLineNo">364</span>   *         written, false if not<a name="line.364"></a>
+<span class="sourceLineNo">365</span>   */<a name="line.365"></a>
+<span class="sourceLineNo">366</span>  public boolean shouldCacheDataOnWrite() {<a name="line.366"></a>
+<span class="sourceLineNo">367</span>    return isBlockCacheEnabled() &amp;&amp; this.cacheDataOnWrite;<a name="line.367"></a>
+<span class="sourceLineNo">368</span>  }<a name="line.368"></a>
+<span class="sourceLineNo">369</span><a name="line.369"></a>
+<span class="sourceLineNo">370</span>  /**<a name="line.370"></a>
+<span class="sourceLineNo">371</span>   * Only used for testing.<a name="line.371"></a>
+<span class="sourceLineNo">372</span>   * @param cacheDataOnWrite whether data blocks should be written to the cache<a name="line.372"></a>
+<span class="sourceLineNo">373</span>   *                         when an HFile is written<a name="line.373"></a>
+<span class="sourceLineNo">374</span>   */<a name="line.374"></a>
+<span class="sourceLineNo">375</span>  @VisibleForTesting<a name="line.375"></a>
+<span class="sourceLineNo">376</span>  public void setCacheDataOnWrite(boolean cacheDataOnWrite) {<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    this.cacheDataOnWrite = cacheDataOnWrite;<a name="line.377"></a>
+<span class="sourceLineNo">378</span>  }<a name="line.378"></a>
+<span class="sourceLineNo">379</span><a name="line.379"></a>
+<span class="sourceLineNo">380</span>  /**<a name="line.380"></a>
+<span class="sourceLineNo">381</span>   * @return true if index blocks should be written to the cache when an HFile<a name="line.381"></a>
+<span class="sourceLineNo">382</span>   *         is written, false if not<a name="line.382"></a>
 <span class="sourceLineNo">383</span>   */<a name="line.383"></a>
-<span class="sourceLineNo">384</span>  public boolean isInMemory() {<a name="line.384"></a>
-<span class="sourceLineNo">385</span>    return isBlockCacheEnabled() &amp;&amp; this.inMemory;<a name="line.385"></a>
+<span class="sourceLineNo">384</span>  public boolean shouldCacheIndexesOnWrite() {<a name="line.384"></a>
+<span class="sourceLineNo">385</span>    return isBlockCacheEnabled() &amp;&amp; this.cacheIndexesOnWrite;<a name="line.385"></a>
 <span class="sourceLineNo">386</span>  }<a name="line.386"></a>
 <span class="sourceLineNo">387</span><a name="line.387"></a>
 <span class="sourceLineNo">388</span>  /**<a name="line.388"></a>
-<span class="sourceLineNo">389</span>   * @return true if data blocks should be written to the cache when an HFile is<a name="line.389"></a>
-<span class="sourceLineNo">390</span>   *         written, false if not<a name="line.390"></a>
+<span class="sourceLineNo">389</span>   * @return true if bloom blocks should be written to the cache when an HFile<a name="line.389"></a>
+<span class="sourceLineNo">390</span>   *         is written, false if not<a name="line.390"></a>
 <span class="sourceLineNo">391</span>   */<a name="line.391"></a>
-<span class="sourceLineNo">392</span>  public boolean shouldCacheDataOnWrite() {<a name="line.392"></a>
-<span class="sourceLineNo">393</span>    return isBlockCacheEnabled() &amp;&amp; this.cacheDataOnWrite;<a name="line.393"></a>
+<span class="sourceLineNo">392</span>  public boolean shouldCacheBloomsOnWrite() {<a name="line.392"></a>
+<span class="sourceLineNo">393</span>    return isBlockCacheEnabled() &amp;&amp; this.cacheBloomsOnWrite;<a name="line.393"></a>
 <span class="sourceLineNo">394</span>  }<a name="line.394"></a>
 <span class="sourceLineNo">395</span><a name="line.395"></a>
 <span class="sourceLineNo">396</span>  /**<a name="line.396"></a>
-<span class="sourceLineNo">397</span>   * Only used for testing.<a name="line.397"></a>
-<span class="sourceLineNo">398</span>   * @param cacheDataOnWrite whether data blocks should be written to the cache<a name="line.398"></a>
-<span class="sourceLineNo">399</span>   *                         when an HFile is written<a name="line.399"></a>
-<span class="sourceLineNo">400</span>   */<a name="line.400"></a>
-<span class="sourceLineNo">401</span>  @VisibleForTesting<a name="line.401"></a>
-<span class="sourceLineNo">402</span>  public void setCacheDataOnWrite(boolean cacheDataOnWrite) {<a name="line.402"></a>
-<span class="sourceLineNo">403</span>    this.cacheDataOnWrite = cacheDataOnWrite;<a name="line.403"></a>
-<span class="sourceLineNo">404</span>  }<a name="line.404"></a>
-<span class="sourceLineNo">405</span><a name="line.405"></a>
-<span class="sourceLineNo">406</span>  /**<a name="line.406"></a>
-<span class="sourceLineNo">407</span>   * @return true if index blocks should be written to the cache when an HFile<a name="line.407"></a>
-<span class="sourceLineNo">408</span>   *         is written, false if not<a name="line.408"></a>
-<span class="sourceLineNo">409</span>   */<a name="line.409"></a>
-<span class="sourceLineNo">410</span>  public boolean shouldCacheIndexesOnWrite() {<a name="line.410"></a>
-<span class="sourceLineNo">411</span>    return isBlockCacheEnabled() &amp;&amp; this.cacheIndexesOnWrite;<a name="line.411"></a>
-<span class="sourceLineNo">412</span>  }<a name="line.412"></a>
-<span class="sourceLineNo">413</span><a name="line.413"></a>
-<span class="sourceLineNo">414</span>  /**<a name="line.414"></a>
-<span class="sourceLineNo">415</span>   * @return true if bloom blocks should be written to the cache when an HFile<a name="line.415"></a>
-<span class="sourceLineNo">416</span>   *         is written, false if not<a name="line.416"></a>
-<span class="sourceLineNo">417</span>   */<a name="line.417"></a>
-<span class="sourceLineNo">418</span>  public boolean shouldCacheBloomsOnWrite() {<a name="line.418"></a>
-<span class="sourceLineNo">419</span>    return isBlockCacheEnabled() &amp;&amp; this.cacheBloomsOnWrite;<a name="line.419"></a>
-<span class="sourceLineNo">420</span>  }<a name="line.420"></a>
-<span class="sourceLineNo">421</span><a name="line.421"></a>
-<span class="sourceLineNo">422</span>  /**<a name="line.422"></a>
-<span class="sourceLineNo">423</span>   * @return true if blocks should be evicted from the cache when an HFile<a name="line.423"></a>
-<span class="sourceLineNo">424</span>   *         reader is closed, false if not<a name="line.424"></a>
-<span class="sourceLineNo">425</span>   */<a name="line.425"></a>
-<span class="sourceLineNo">426</span>  public boolean shouldEvictOnClose() {<a name="line.426"></a>
-<span class="sourceLineNo">427</span>    return isBlockCacheEnabled() &amp;&amp; this.evictOnClose;<a name="line.427"></a>
-<span class="sourceLineNo">428</span>  }<a name="line.428"></a>
-<span class="sourceLineNo">429</span><a name="line.429"></a>
-<span class="sourceLineNo">430</span>  /**<a name="line.430"></a>
-<span class="sourceLineNo">431</span>   * Only used for testing.<a name="line.431"></a>
-<span class="sourceLineNo">432</span>   * @param evictOnClose whether blocks should be evicted from the cache when an<a name="line.432"></a>
-<span class="sourceLineNo">433</span>   *                     HFile reader is closed<a name="line.433"></a>
-<span class="sourceLineNo">434</span>   */<a name="line.434"></a>
-<span class="sourceLineNo">435</span>  public void setEvictOnClose(boolean evictOnClose) {<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    this.evictOnClose = evictOnClose;<a name="line.436"></a>
-<span class="sourceLineNo">437</span>  }<a name="line.437"></a>
-<span class="sourceLineNo">438</span><a name="line.438"></a>
-<span class="sourceLineNo">439</span>  /**<a name="line.439"></a>
-<span class="sourceLineNo">440</span>   * @return true if data blocks should be compressed in the cache, false if not<a name="line.440"></a>
-<span class="sourceLineNo">441</span>   */<a name="line.441"></a>
-<span class="sourceLineNo">442</span>  public boolean shouldCacheDataCompressed() {<a name="line.442"></a>
-<span class="sourceLineNo">443</span>    return isBlockCacheEnabled() &amp;&amp; this.cacheDataOnRead &amp;&amp; this.cacheDataCompressed;<a name="line.443"></a>
-<span class="sourceLineNo">444</span>  }<a name="line.444"></a>
-<span class="sourceLineNo">445</span><a name="line.445"></a>
-<span class="sourceLineNo">446</span>  /**<a name="line.446"></a>
-<span class="sourceLineNo">447</span>   * @return true if this {@link BlockCategory} should be compressed in blockcache, false otherwise<a name="line.447"></a>
-<span class="sourceLineNo">448</span>   */<a name="line.448"></a>
-<span class="sourceLineNo">449</span>  public boolean shouldCacheCompressed(BlockCategory category) {<a name="line.449"></a>
-<span class="sourceLineNo">450</span>    if (!isBlockCacheEnabled()) return false;<a name="line.450"></a>
-<span class="sourceLineNo">451</span>    switch (category) {<a name="line.451"></a>
-<span class="sourceLineNo">452</span>      case DATA:<a name="line.452"></a>
-<span class="sourceLineNo">453</span>        return this.cacheDataOnRead &amp;&amp; this.cacheDataCompressed;<a name="line.453"></a>
-<span class="sourceLineNo">454</span>      default:<a name="line.454"></a>
-<span class="sourceLineNo">455</span>        return false;<a name="line.455"></a>
-<span class="sourceLineNo">456</span>    }<a name="line.456"></a>
-<span class="sourceLineNo">457</span>  }<a name="line.457"></a>
-<span class="sourceLineNo">458</span><a name="line.458"></a>
-<span class="sourceLineNo">459</span>  /**<a name="line.459"></a>
-<span class="sourceLineNo">460</span>   * @return true if blocks should be prefetched into the cache on open, false if not<a name="line.460"></a>
-<span class="sourceLineNo">461</span>   */<a name="line.461"></a>
-<span class="sourceLineNo">462</span>  public boolean shouldPrefetchOnOpen() {<a name="line.462"></a>
-<span class="sourceLineNo">463</span>    return isBlockCacheEnabled() &amp;&amp; this.prefetchOnOpen;<a name="line.463"></a>
-<span class="sourceLineNo">464</span>  }<a name="line.464"></a>
-<span class="sourceLineNo">465</span><a name="line.465"></a>
-<span class="sourceLineNo">466</span>  /**<a name="line.466"></a>
-<span class="sourceLineNo">467</span>   * Return true if we may find this type of block in block cache.<a name="line.467"></a>
-<span class="sourceLineNo">468</span>   * &lt;p&gt;<a name="line.468"></a>
-<span class="sourceLineNo">469</span>   * TODO: today {@code family.isBlockCacheEnabled()} only means {@code cacheDataOnRead}, so here we<a name="line.469"></a>
-<span class="sourceLineNo">470</span>   * consider lots of other configurations such as {@code cacheDataOnWrite}. We should fix this in<a name="line.470"></a>
-<span class="sourceLineNo">471</span>   * the future, {@code cacheDataOnWrite} should honor the CF level {@code isBlockCacheEnabled}<a name="line.471"></a>
-<span class="sourceLineNo">472</span>   * configuration.<a name="line.472"></a>
-<span class="sourceLineNo">473</span>   */<a name="line.473"></a>
-<span class="sourceLineNo">474</span>  public boolean shouldReadBlockFromCache(BlockType blockType) {<a name="line.474"></a>
-<span class="sourceLineNo">475</span>    if (!isBlockCacheEnabled()) {<a name="line.475"></a>
-<span class="sourceLineNo">476</span>      return false;<a name="line.476"></a>
-<span class="sourceLineNo">477</span>    }<a name="line.477"></a>
-<span class="sourceLineNo">478</span>    if (cacheDataOnRead) {<a name="line.478"></a>
-<span class="sourceLineNo">479</span>      return true;<a name="line.479"></a>
-<span class="sourceLineNo">480</span>    }<a name="line.480"></a>
-<span class="sourceLineNo">481</span>    if (prefetchOnOpen) {<a name="line.481"></a>
-<span class="sourceLineNo">482</span>      return true;<a name="line.482"></a>
-<span class="sourceLineNo">483</span>    }<a name="line.483"></a>
-<span class="sourceLineNo">484</span>    if (cacheDataOnWrite) {<a name="line.484"></a>
-<span class="sourceLineNo">485</span>      return true;<a name="line.485"></a>
+<span class="sourceLineNo">397</span>   * @return true if blocks should be evicted from the cache when an HFile<a name="line.397"></a>
+<span class="sourceLineNo">398</span>   *         reader is closed, false if not<a name="line.398"></a>
+<span class="sourceLineNo">399</span>   */<a name="line.399"></a>
+<span class="sourceLineNo">400</span>  public boolean shouldEvictOnClose() {<a name="line.400"></a>
+<span class="sourceLineNo">401</span>    return isBlockCacheEnabled() &amp;&amp; this.evictOnClose;<a name="line.401"></a>
+<span class="sourceLineNo">402</span>  }<a name="line.402"></a>
+<span class="sourceLineNo">403</span><a name="line.403"></a>
+<span class="sourceLineNo">404</span>  /**<a name="line.404"></a>
+<span class="sourceLineNo">405</span>   * Only used for testing.<a name="line.405"></a>
+<span class="sourceLineNo">406</span>   * @param evictOnClose whether blocks should be evicted from the cache when an<a name="line.406"></a>
+<span class="sourceLineNo">407</span>   *                     HFile reader is closed<a name="line.407"></a>
+<span class="sourceLineNo">408</span>   */<a name="line.408"></a>
+<span class="sourceLineNo">409</span>  public void setEvictOnClose(boolean evictOnClose) {<a name="line.409"></a>
+<span class="sourceLineNo">410</span>    this.evictOnClose = evictOnClose;<a name="line.410"></a>
+<span class="sourceLineNo">411</span>  }<a name="line.411"></a>
+<span class="sourceLineNo">412</span><a name="line.412"></a>
+<span class="sourceLineNo">413</span>  /**<a name="line.413"></a>
+<span class="sourceLineNo">414</span>   * @return true if data blocks should be compressed in the cache, false if not<a name="line.414"></a>
+<span class="sourceLineNo">415</span>   */<a name="line.415"></a>
+<span class="sourceLineNo">416</span>  public boolean shouldCacheDataCompressed() {<a name="line.416"></a>
+<span class="sourceLineNo">417</span>    return isBlockCacheEnabled() &amp;&amp; this.cacheDataOnRead &amp;&amp; this.cacheDataCompressed;<a name="line.417"></a>
+<span class="sourceLineNo">418</span>  }<a name="line.418"></a>
+<span class="sourceLineNo">419</span><a name="line.419"></a>
+<span class="sourceLineNo">420</span>  /**<a name="line.420"></a>
+<span class="sourceLineNo">421</span>   * @return true if this {@link BlockCategory} should be compressed in blockcache, false otherwise<a name="line.421"></a>
+<span class="sourceLineNo">422</span>   */<a name="line.422"></a>
+<span class="sourceLineNo">423</span>  public boolean shouldCacheCompressed(BlockCategory category) {<a name="line.423"></a>
+<span class="sourceLineNo">424</span>    if (!isBlockCacheEnabled()) return false;<a name="line.424"></a>
+<span class="sourceLineNo">425</span>    switch (category) {<a name="line.425"></a>
+<span class="sourceLineNo">426</span>      case DATA:<a name="line.426"></a>
+<span class="sourceLineNo">427</span>        return this.cacheDataOnRead &amp;&amp; this.cacheDataCompressed;<a name="line.427"></a>
+<span class="sourceLineNo">428</span>      default:<a name="line.428"></a>
+<span class="sourceLineNo">429</span>        return false;<a name="line.429"></a>
+<span class="sourceLineNo">430</span>    }<a name="line.430"></a>
+<span class="sourceLineNo">431</span>  }<a name="line.431"></a>
+<span class="sourceLineNo">432</span><a name="line.432"></a>
+<span class="sourceLineNo">433</span>  /**<a name="line.433"></a>
+<span class="sourceLineNo">434</span>   * @return true if blocks should be prefetched into the cache on open, false if not<a name="line.434"></a>
+<span class="sourceLineNo">435</span>   */<a name="line.435"></a>
+<span class="sourceLineNo">436</span>  public boolean shouldPrefetchOnOpen() {<a name="line.436"></a>
+<span class="sourceLineNo">437</span>    return isBlockCacheEnabled() &amp;&amp; this.prefetchOnOpen;<a name="line.437"></a>
+<span class="sourceLineNo">438</span>  }<a name="line.438"></a>
+<span class="sourceLineNo">439</span><a name="line.439"></a>
+<span class="sourceLineNo">440</span>  /**<a name="line.440"></a>
+<span class="sourceLineNo">441</span>   * Return true if we may find this type of block in block cache.<a name="line.441"></a>
+<span class="sourceLineNo">442</span>   * &lt;p&gt;<a name="line.442"></a>
+<span class="sourceLineNo">443</span>   * TODO: today {@code family.isBlockCacheEnabled()} only means {@code cacheDataOnRead}, so here we<a name="line.443"></a>
+<span class="sourceLineNo">444</span>   * consider lots of other configurations such as {@code cacheDataOnWrite}. We should fix this in<a name="line.444"></a>
+<span class="sourceLineNo">445</span>   * the future, {@code cacheDataOnWrite} should honor the CF level {@code isBlockCacheEnabled}<a name="line.445"></a>
+<span class="sourceLineNo">446</span>   * configuration.<a name="line.446"></a>
+<span class="sourceLineNo">447</span>   */<a name="line.447"></a>
+<span class="sourceLineNo">448</span>  public boolean shouldReadBlockFromCache(BlockType blockType) {<a name="line.448"></a>
+<span class="sourceLineNo">449</span>    if (!isBlockCacheEnabled()) {<a name="line.449"></a>
+<span class="sourceLineNo">450</span>      return false;<a name="line.450"></a>
+<span class="sourceLineNo">451</span>    }<a name="line.451"></a>
+<span class="sourceLineNo">452</span>    if (cacheDataOnRead) {<a name="line.452"></a>
+<span class="sourceLineNo">453</span>      return true;<a name="line.453"></a>
+<span class="sourceLineNo">454</span>    }<a name="line.454"></a>
+<span class="sourceLineNo">455</span>    if (prefetchOnOpen) {<a name="line.455"></a>
+<span class="sourceLineNo">456</span>      return true;<a name="line.456"></a>
+<span class="sourceLineNo">457</span>    }<a name="line.457"></a>
+<span class="sourceLineNo">458</span>    if (cacheDataOnWrite) {<a name="line.458"></a>
+<span class="sourceLineNo">459</span>      return true;<a name="line.459"></a>
+<span class="sourceLineNo">460</span>    }<a name="line.460"></a>
+<span class="sourceLineNo">461</span>    if (blockType == null) {<a name="line.461"></a>
+<span class="sourceLineNo">462</span>      return true;<a name="line.462"></a>
+<span class="sourceLineNo">463</span>    }<a name="line.463"></a>
+<span class="sourceLineNo">464</span>    if (blockType.getCategory() == BlockCategory.BLOOM ||<a name="line.464"></a>
+<span class="sourceLineNo">465</span>            blockType.getCategory() == BlockCategory.INDEX) {<a name="line.465"></a>
+<span class="sourceLineNo">466</span>      return true;<a name="line.466"></a>
+<span class="sourceLineNo">467</span>    }<a name="line.467"></a>
+<span class="sourceLineNo">468</span>    return false;<a name="line.468"></a>
+<span class="sourceLineNo">469</span>  }<a name="line.469"></a>
+<span class="sourceLineNo">470</span><a name="line.470"></a>
+<span class="sourceLineNo">471</span>  /**<a name="line.471"></a>
+<span class="sourceLineNo">472</span>   * If we make sure the block could not be cached, we will not acquire the lock<a name="line.472"></a>
+<span class="sourceLineNo">473</span>   * otherwise we will acquire lock<a name="line.473"></a>
+<span class="sourceLineNo">474</span>   */<a name="line.474"></a>
+<span class="sourceLineNo">475</span>  public boolean shouldLockOnCacheMiss(BlockType blockType) {<a name="line.475"></a>
+<span class="sourceLineNo">476</span>    if (blockType == null) {<a name="line.476"></a>
+<span class="sourceLineNo">477</span>      return true;<a name="line.477"></a>
+<span class="sourceLineNo">478</span>    }<a name="line.478"></a>
+<span class="sourceLineNo">479</span>    return shouldCacheBlockOnRead(blockType.getCategory());<a name="line.479"></a>
+<span class="sourceLineNo">480</span>  }<a name="line.480"></a>
+<span class="sourceLineNo">481</span><a name="line.481"></a>
+<span class="sourceLineNo">482</span>  @Override<a name="line.482"></a>
+<span class="sourceLineNo">483</span>  public String toString() {<a name="line.483"></a>
+<span class="sourceLineNo">484</span>    if (!isBlockCacheEnabled()) {<a name="line.484"></a>
+<span class="sourceLineNo">485</span>      return "CacheConfig:disabled";<a name="line.485"></a>
 <span class="sourceLineNo">486</span>    }<a name="line.486"></a>
-<span class="sourceLineNo">487</span>    if (blockType == null) {<a name="line.487"></a>
-<span class="sourceLineNo">488</span>      return true;<a name="line.488"></a>
-<span class="sourceLineNo">489</span>    }<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    if (blockType.getCategory() == BlockCategory.BLOOM ||<a name="line.490"></a>
-<span class="sourceLineNo">491</span>            blockType.getCategory() == BlockCategory.INDEX) {<a name="line.491"></a>
-<span class="sourceLineNo">492</span>      return true;<a name="line.492"></a>
-<span class="sourceLineNo">493</span>    }<a name="line.493"></a>
-<span class="sourceLineNo">494</span>    return false;<a name="line.494"></a>
+<span class="sourceLineNo">487</span>    return "blockCache=" + getBlockCache() +<a name="line.487"></a>
+<span class="sourceLineNo">488</span>      ", cacheDataOnRead=" + shouldCacheDataOnRead() +<a name="line.488"></a>
+<span class="sourceLineNo">489</span>      ", cacheDataOnWrite=" + shouldCacheDataOnWrite() +<a name="line.489"></a>
+<span class="sourceLineNo">490</span>      ", cacheIndexesOnWrite=" + shouldCacheIndexesOnWrite() +<a name="line.490"></a>
+<span class="sourceLineNo">491</span>      ", cacheBloomsOnWrite=" + shouldCacheBloomsOnWrite() +<a name="line.491"></a>
+<span class="sourceLineNo">492</span>      ", cacheEvictOnClose=" + shouldEvictOnClose() +<a name="line.492"></a>
+<span class="sourceLineNo">493</span>      ", cacheDataCompressed=" + shouldCacheDataCompressed() +<a name="line.493"></a>
+<span class="sourceLineNo">494</span>      ", prefetchOnOpen=" + shouldPrefetchOnOpen();<a name="line.494"></a>
 <span class="sourceLineNo">495</span>  }<a name="line.495"></a>
 <span class="sourceLineNo">496</span><a name="line.496"></a>
-<span class="sourceLineNo">497</span>  /**<a name="line.497"></a>
-<span class="sourceLineNo">498</span>   * If we make sure the block could not be cached, we will not acquire the lock<a name="line.498"></a>
-<span class="sourceLineNo">499</span>   * otherwise we will acquire lock<a name="line.499"></a>
-<span class="sourceLineNo">500</span>   */<a name="line.500"></a>
-<span class="sourceLineNo">501</span>  public boolean shouldLockOnCacheMiss(BlockType blockType) {<a name="line.501"></a>
-<span class="sourceLineNo">502</span>    if (blockType == null) {<a name="line.502"></a>
-<span class="sourceLineNo">503</span>      return true;<a name="line.503"></a>
-<span class="sourceLineNo">504</span>    }<a name="line.504"></a>
-<span class="sourceLineNo">505</span>    return shouldCacheBlockOnRead(blockType.getCategory());<a name="line.505"></a>
-<span class="sourceLineNo">506</span>  }<a name="line.506"></a>
-<span class="sourceLineNo">507</span><a name="line.507"></a>
-<span class="sourceLineNo">508</span>  @Override<a name="line.508"></a>
-<span class="sourceLineNo">509</span>  public String toString() {<a name="line.509"></a>
-<span class="sourceLineNo">510</span>    if (!isBlockCacheEnabled()) {<a name="line.510"></a>
-<span class="sourceLineNo">511</span>      return "CacheConfig:disabled";<a name="line.511"></a>
-<span class="sourceLineNo">512</span>    }<a name="line.512"></a>
-<span class="sourceLineNo">513</span>    return "blockCache=" + getBlockCache() +<a name="line.513"></a>
-<span class="sourceLineNo">514</span>      ", cacheDataOnRead=" + shouldCacheDataOnRead() +<a name="line.514"></a>
-<span class="sourceLineNo">515</span>      ", cacheDataOnWrite=" + shouldCacheDataOnWrite() +<a name="line.515"></a>
-<span class="sourceLineNo">516</span>      ", cacheIndexesOnWrite=" + shouldCacheIndexesOnWrite() +<a name="line.516"></a>
-<span class="sourceLineNo">517</span>      ", cacheBloomsOnWrite=" + shouldCacheBloomsOnWrite() +<a name="line.517"></a>
-<span class="sourceLineNo">518</span>      ", cacheEvictOnClose=" + shouldEvictOnClose() +<a name="line.518"></a>
-<span class="sourceLineNo">519</span>      ", cacheDataCompressed=" + shouldCacheDataCompressed() +<a name="line.519"></a>
-<span class="sourceLineNo">520</span>      ", prefetchOnOpen=" + shouldPrefetchOnOpen();<a name="line.520"></a>
-<span class="sourceLineNo">521</span>  }<a name="line.521"></a>
-<span class="sourceLineNo">522</span><a name="line.522"></a>
-<span class="sourceLineNo">523</span>  // Static block cache reference and methods<a name="line.523"></a>
-<span class="sourceLineNo">524</span><a name="line.524"></a>
-<span class="sourceLineNo">525</span>  /**<a name="line.525"></a>
-<span class="sourceLineNo">526</span>   * Static reference to the block cache, or null if no caching should be used<a name="line.526"></a>
-<span class="sourceLineNo">527</span>   * at all.<a name="line.527"></a>
-<span class="sourceLineNo">528</span>   */<a name="line.528"></a>
-<span class="sourceLineNo">529</span>  // Clear this if in tests you'd make more than one block cache instance.<a name="line.529"></a>
-<span class="sourceLineNo">530</span>  @VisibleForTesting<a name="line.530"></a>
-<span class="sourceLineNo">531</span>  static BlockCache GLOBAL_BLOCK_CACHE_INSTANCE;<a name="line.531"></a>
-<span class="sourceLineNo">532</span>  private static LruBlockCache ONHEAP_CACHE_INSTANCE = null;<a name="line.532"></a>
-<span class="sourceLineNo">533</span>  private static BlockCache L2_CACHE_INSTANCE = null;// Can be BucketCache or External cache.<a name="line.533"></a>
+<span class="sourceLineNo">497</span>  // Static block cache reference and methods<a name="line.497"></a>
+<span class="sourceLineNo">498</span><a name="line.498"></a>
+<span class="sourceLineNo">499</span>  /**<a name="line.499"></a>
+<span class="sourceLineNo">500</span>   * Static reference to the block cache, or null if no caching should be used<a name="line.500"></a>
+<span class="sourceLineNo">501</span>   * at all.<a name="line.501"></a>
+<span class="sourceLineNo">502</span>   */<a name="line.502"></a>
+<span class="sourceLineNo">503</span>  // Clear this if in tests you'd make more than one block cache instance.<a name="line.503"></a>
+<span class="sourceLineNo">504</span>  @VisibleForTesting<a name="line.504"></a>
+<span class="sourceLineNo">505</span>  static BlockCache GLOBAL_BLOCK_CACHE_INSTANCE;<a name="line.505"></a>
+<span class="sourceLineNo">506</span>  private static LruBlockCache ONHEAP_CACHE_INSTANCE = null;<a name="line.506"></a>
+<span class="sourceLineNo">507</span>  private static BlockCache L2_CACHE_INSTANCE = null;// Can be BucketCache or External cache.<a name="line.507"></a>
+<span class="sourceLineNo">508</span><a name="line.508"></a>
+<span class="sourceLineNo">509</span>  /** Boolean whether we have disabled the block cache entirely. */<a name="line.509"></a>
+<span class="sourceLineNo">510</span>  @VisibleForTesting<a name="line.510"></a>
+<span class="sourceLineNo">511</span>  static boolean blockCacheDisabled = false;<a name="line.511"></a>
+<span class="sourceLineNo">512</span><a name="line.512"></a>
+<span class="sourceLineNo">513</span>  /**<a name="line.513"></a>
+<span class="sourceLineNo">514</span>   * @param c Configuration to use.<a name="line.514"></a>
+<span class="sourceLineNo">515</span>   * @return An L1 instance.  Currently an instance of LruBlockCache.<a name="line.515"></a>
+<span class="sourceLineNo">516</span>   */<a name="line.516"></a>
+<span class="sourceLineNo">517</span>  public static LruBlockCache getOnHeapCache(final Configuration c) {<a name="line.517"></a>
+<span class="sourceLineNo">518</span>    return getOnHeapCacheInternal(c);<a name="line.518"></a>
+<span class="sourceLineNo">519</span>  }<a name="line.519"></a>
+<span class="sourceLineNo">520</span><a name="line.520"></a>
+<span class="sourceLineNo">521</span>  public CacheStats getOnHeapCacheStats() {<a name="line.521"></a>
+<span class="sourceLineNo">522</span>    if (ONHEAP_CACHE_INSTANCE != null) {<a name="line.522"></a>
+<span class="sourceLineNo">523</span>      return ONHEAP_CACHE_INSTANCE.getStats();<a name="line.523"></a>
+<span class="sourceLineNo">524</span>    }<a name="line.524"></a>
+<span class="sourceLineNo">525</span>    return null;<a name="line.525"></a>
+<span class="sourceLineNo">526</span>  }<a name="line.526"></a>
+<span class="sourceLineNo">527</span><a name="line.527"></a>
+<span class="sourceLineNo">528</span>  public CacheStats getL2CacheStats() {<a name="line.528"></a>
+<span class="sourceLineNo">529</span>    if (L2_CACHE_INSTANCE != null) {<a name="line.529"></a>
+<span class="sourceLineNo">530</span>      return L2_CACHE_INSTANCE.getStats();<a name="line.530"></a>
+<span class="sourceLineNo">531</span>    }<a name="line.531"></a>
+<span class="sourceLineNo">532</span>    return null;<a name="line.532"></a>
+<span class="sourceLineNo">533</span>  }<a name="line.533"></a>
 <span class="sourceLineNo">534</span><a name="line.534"></a>
-<span class="sourceLineNo">535</span>  /** Boolean whether we have disabled the block cache entirely. */<a name="line.535"></a>
-<span class="sourceLineNo">536</span>  @VisibleForTesting<a name="line.536"></a>
-<span class="sourceLineNo">537</span>  static boolean blockCacheDisabled = false;<a name="line.537"></a>
-<span class="sourceLineNo">538</span><a name="line.538"></a>
-<span class="sourceLineNo">539</span>  /**<a name="line.539"></a>
-<span class="sourceLineNo">540</span>   * @param c Configuration to use.<a name="line.540"></a>
-<span class="sourceLineNo">541</span>   * @return An L1 instance.  Currently an instance of LruBlockCache.<a name="line.541"></a>
-<span class="sourceLineNo">542</span>   */<a name="line.542"></a>
-<span class="sourceLineNo">543</span>  public static LruBlockCache getOnHeapCache(final Configuration c) {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    return getOnHeapCacheInternal(c);<a name="line.544"></a>
-<span class="sourceLineNo">545</span>  }<a name="line.545"></a>
-<span class="sourceLineNo">546</span><a name="line.546"></a>
-<span class="sourceLineNo">547</span>  public CacheStats getOnHeapCacheStats() {<a name="line.547"></a>
-<span class="sourceLineNo">548</span>    if (ONHEAP_CACHE_INSTANCE != null) {<a name="line.548"></a>
-<span class="sourceLineNo">549</span>      return ONHEAP_CACHE_INSTANCE.getStats();<a name="line.549"></a>
-<span class="sourceLineNo">550</span>    }<a name="line.550"></a>
-<span class="sourceLineNo">551</span>    return null;<a name="line.551"></a>
-<span class="sourceLineNo">552</span>  }<a name="line.552"></a>
-<span class="sourceLineNo">553</span><a name="line.553"></a>
-<span class="sourceLineNo">554</span>  public CacheStats getL2CacheStats() {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>    if (L2_CACHE_INSTANCE != null) {<a name="line.555"></a>
-<span class="sourceLineNo">556</span>      return L2_CACHE_INSTANCE.getStats();<a name="line.556"></a>
-<span class="sourceLineNo">557</span>    }<a name="line.557"></a>
-<span class="sourceLineNo">558</span>    return null;<a name="line.558"></a>
-<span class="sourceLineNo">559</span>  }<a name="line.559"></a>
+<span class="sourceLineNo">535</span>  /**<a name="line.535"></a>
+<span class="sourceLineNo">536</span>   * @param c Configuration to use.<a name="line.536"></a>
+<span class="sourceLineNo">537</span>   * @return An L1 instance.  Currently an instance of LruBlockCache.<a name="line.537"></a>
+<span class="sourceLineNo">538</span>   */<a name="line.538"></a>
+<span class="sourceLineNo">539</span>  private synchronized static LruBlockCache getOnHeapCacheInternal(final Configuration c) {<a name="line.539"></a>
+<span class="sourceLineNo">540</span>    if (ONHEAP_CACHE_INSTANCE != null) {<a name="line.540"></a>
+<span class="sourceLineNo">541</span>      return ONHEAP_CACHE_INSTANCE;<a name="line.541"></a>
+<span class="sourceLineNo">542</span>    }<a name="line.542"></a>
+<span class="sourceLineNo">543</span>    final long cacheSize = MemorySizeUtil.getOnHeapCacheSize(c);<a name="line.543"></a>
+<span class="sourceLineNo">544</span>    if (cacheSize &lt; 0) {<a name="line.544"></a>
+<span class="sourceLineNo">545</span>      blockCacheDisabled = true;<a name="line.545"></a>
+<span class="sourceLineNo">546</span>    }<a name="line.546"></a>
+<span class="sourceLineNo">547</span>    if (blockCacheDisabled) return null;<a name="line.547"></a>
+<span class="sourceLineNo">548</span>    int blockSize = c.getInt(BLOCKCACHE_BLOCKSIZE_KEY, HConstants.DEFAULT_BLOCKSIZE);<a name="line.548"></a>
+<span class="sourceLineNo">549</span>    LOG.info("Allocating onheap LruBlockCache size=" +<a name="line.549"></a>
+<span class="sourceLineNo">550</span>      StringUtils.byteDesc(cacheSize) + ", blockSize=" + StringUtils.byteDesc(blockSize));<a name="line.550"></a>
+<span class="sourceLineNo">551</span>    ONHEAP_CACHE_INSTANCE = new LruBlockCache(cacheSize, blockSize, true, c);<a name="line.551"></a>
+<span class="sourceLineNo">552</span>    return ONHEAP_CACHE_INSTANCE;<a name="line.552"></a>
+<span class="sourceLineNo">553</span>  }<a name="line.553"></a>
+<span class="sourceLineNo">554</span><a name="line.554"></a>
+<span class="sourceLineNo">555</span>  private static BlockCache getExternalBlockcache(Configuration c) {<a name="line.555"></a>
+<span class="sourceLineNo">556</span>    if (LOG.isDebugEnabled()) {<a name="line.556"></a>
+<span class="sourceLineNo">557</span>      LOG.debug("Trying to use External l2 cache");<a name="line.557"></a>
+<span class="sourceLineNo">558</span>    }<a name="line.558"></a>
+<span class="sourceLineNo">559</span>    Class klass = null;<a name="line.559"></a>
 <span class="sourceLineNo">560</span><a name="line.560"></a>
-<span class="sourceLineNo">561</span>  /**<a name="line.561"></a>
-<span class="sourceLineNo">562</span>   * @param c Configuration to use.<a name="line.562"></a>
-<span class="sourceLineNo">563</span>   * @return An L1 instance.  Currently an instance of LruBlockCache.<a name="line.563"></a>
-<span class="sourceLineNo">564</span>   */<a name="line.564"></a>
-<span class="sourceLineNo">565</span>  private synchronized static LruBlockCache getOnHeapCacheInternal(final Configuration c) {<a name="line.565"></a>
-<span class="sourceLineNo">566</span>    if (ONHEAP_CACHE_INSTANCE != null) {<a name="line.566"></a>
-<span class="sourceLineNo">567</span>      return ONHEAP_CACHE_INSTANCE;<a name="line.567"></a>
-<span class="sourceLineNo">568</span>    }<a name="line.568"></a>
-<span class="sourceLineNo">569</span>    final long cacheSize = MemorySizeUtil.getOnHeapCacheSize(c);<a name="line.569"></a>
-<span class="sourceLineNo">570</span>    if (cacheSize &lt; 0) {<a name="line.570"></a>
-<span class="sourceLineNo">571</span>      blockCacheDisabled = true;<a name="line.571"></a>
-<span class="sourceLineNo">572</span>    }<a name="line.572"></a>
-<span class="sourceLineNo">573</span>    if (blockCacheDisabled) return null;<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    int blockSize = c.getInt(BLOCKCACHE_BLOCKSIZE_KEY, HConstants.DEFAULT_BLOCKSIZE);<a name="line.574"></a>
-<span class="sourceLineNo">575</span>    LOG.info("Allocating onheap LruBlockCache size=" +<a name="line.575"></a>
-<span class="sourceLineNo">576</span>      StringUtils.byteDesc(cacheSize) + ", blockSize=" + StringUtils.byteDesc(blockSize));<a name="line.576"></a>
-<span class="sourceLineNo">577</span>    ONHEAP_CACHE_INSTANCE = new LruBlockCache(cacheSize, blockSize, true, c);<a name="line.577"></a>
-<span class="sourceLineNo">578</span>    return ONHEAP_CACHE_INSTANCE;<a name="line.578"></a>
-<span class="sourceLineNo">579</span>  }<a name="line.579"></a>
-<span class="sourceLineNo">580</span><a name="line.580"></a>
-<span class="sourceLineNo">581</span>  private static BlockCache getExternalBlockcache(Configuration c) {<a name="line.581"></a>
-<span class="sourceLineNo">582</span>    if (LOG.isDebugEnabled()) {<a name="line.582"></a>
-<span class="sourceLineNo">583</span>      LOG.debug("Trying to use External l2 cache");<a name="line.583"></a>
-<span class="sourceLineNo">584</span>    }<a name="line.584"></a>
-<span class="sourceLineNo">585</span>    Class klass = null;<a name="line.585"></a>
-<span class="sourceLineNo">586</span><a name="line.586"></a>
-<span class="sourceLineNo">587</span>    // Get the class, from the config. s<a name="line.587"></a>
-<span class="sourceLineNo">588</span>    try {<a name="line.588"></a>
-<span class="sourceLineNo">589</span>      klass = ExternalBlockCaches.valueOf(c.get(EXTERNAL_BLOCKCACHE_CLASS_KEY, "memcache")).clazz;<a name="line.589"></a>
-<span class="sourceLineNo">590</span>    } catch (IllegalArgumentException exception) {<a name="line.590"></a>
-<span class="sourceLineNo">591</span>      try {<a name="line.591"></a>
-<span class="sourceLineNo">592</span>        klass = c.getClass(EXTERNAL_BLOCKCACHE_CLASS_KEY, Class.forName(<a name="line.592"></a>
-<span class="sourceLineNo">593</span>            "org.apache.hadoop.hbase.io.hfile.MemcachedBlockCache"));<a name="line.593"></a>
-<span class="sourceLineNo">594</span>      } catch (ClassNotFoundException e) {<a name="line.594"></a>
-<span class="sourceLineNo">595</span>        return null;<a name="line.595"></a>
-<span class="sourceLineNo">596</span>      }<a name="line.596"></a>
-<span class="sourceLineNo">597</span>    }<a name="line.597"></a>
-<span class="sourceLineNo">598</span><a name="line.598"></a>
-<span class="sourceLineNo">599</span>    // Now try and create an instance of the block cache.<a name="line.599"></a>
-<span class="sourceLineNo">600</span>    try {<a name="line.600"></a>
-<span class="sourceLineNo">601</span>      LOG.info("Creating external block cache of type: " + klass);<a name="line.601"></a>
-<span class="sourceLineNo">602</span>      return (BlockCache) ReflectionUtils.newInstance(klass, c);<a name="line.602"></a>
-<span class="sourceLineNo">603</span>    } catch (Exception e) {<a name="line.603"></a>
-<span class="sourceLineNo">604</span>      LOG.warn("Error creating external block cache", e);<a name="line.604"></a>
-<span class="sourceLineNo">605</span>    }<a name="line.605"></a>
-<span class="sourceLineNo">606</span>    return null;<a name="line.606"></a>
-<span class="sourceLineNo">607</span><a name="line.607"></a>
-<span class="sourceLineNo">608</span>  }<a name="line.608"></a>
-<span class="sourceLineNo">609</span><a name="line.609"></a>
-<span class="sourceLineNo">610</span>  @VisibleForTesting<a name="line.610"></a>
-<span class="sourceLineNo">611</span>  static BucketCache getBucketCache(Configuration c) {<a name="line.611"></a>
-<span class="sourceLineNo">612</span>    // Check for L2.  ioengine name must be non-null.<a name="line.612"></a>
-<span class="sourceLineNo">613</span>    String bucketCacheIOEngineName = c.get(BUCKET_CACHE_IOENGINE_KEY, null);<a name="line.613"></a>
-<span class="sourceLineNo">614</span>    if (bucketCacheIOEngineName == null || bucketCacheIOEngineName.length() &lt;= 0) return null;<a name="line.614"></a>
-<span class="sourceLineNo">615</span><a name="line.615"></a>
-<span class="sourceLineNo">616</span>    int blockSize = c.getInt(BLOCKCACHE_BLOCKSIZE_KEY, HConstants.DEFAULT_BLOCKSIZE);<a name="line.616"></a>
-<span class="sourceLineNo">617</span>    final long bucketCacheSize = MemorySizeUtil.getBucketCacheSize(c);<a name="line.617"></a>
-<span class="sourceLineNo">618</span>    if (bucketCacheSize &lt;= 0) {<a name="line.618"></a>
-<span class="sourceLineNo">619</span>      throw new IllegalStateException("bucketCacheSize &lt;= 0; Check " +<a name="line.619"></a>
-<span class="sourceLineNo">620</span>        BUCKET_CACHE_SIZE_KEY + " setting and/or server java heap size");<a name="line.620"></a>
-<span class="sourceLineNo">621</span>    }<a name="line.621"></a>
-<span class="sourceLineNo">622</span>    if (c.get("hbase.bucketcache.percentage.in.combinedcache") != null) {<a name="line.622"></a>
-<span class="sourceLineNo">623</span>      LOG.warn("Configuration 'hbase.bucketcache.percentage.in.combinedcache' is no longer "<a name="line.623"></a>
-<span class="sourceLineNo">624</span>          + "respected. See comments in http://hbase.apache.org/book.html#_changes_of_note");<a name="line.624"></a>
-<span class="sourceLineNo">625</span>    }<a name="line.625"></a>
-<span class="sourceLineNo">626</span>    int writerThreads = c.getInt(BUCKET_CACHE_WRITER_THREADS_KEY,<a name="line.626"></a>
-<span class="sourceLineNo">627</span>      DEFAULT_BUCKET_CACHE_WRITER_THREADS);<a name="line.627"></a>
-<span class="sourceLineNo">628</span>    int writerQueueLen = c.getInt(BUCKET_CACHE_WRITER_QUEUE_KEY,<a name="line.628"></a>
-<span class="sourceLineNo">629</span>      DEFAULT_BUCKET_CACHE_WRITER_QUEUE);<a name="line.629"></a>
-<span class="sourceLineNo">630</span>    String persistentPath = c.get(BUCKET_CACHE_PERSISTENT_PATH_KEY);<a name="line.630"></a>
-<span class="sourceLineNo">631</span>    String[] configuredBucketSizes = c.getStrings(BUCKET_CACHE_BUCKETS_KEY);<a name="line.631"></a>
-<span class="sourceLineNo">632</span>    int [] bucketSizes = null;<a name="line.632"></a>
-<span class="sourceLineNo">633</span>    if (configuredBucketSizes != null) {<a name="line.633"></a>
-<span class="sourceLineNo">634</span>      bucketSizes = new int[configuredBucketSizes.length];<a name="line.634"></a>
-<span class="sourceLineNo">635</span>      for (int i = 0; i &lt; configuredBucketSizes.length; i++) {<a name="line.635"></a>
-<span class="sourceLineNo">636</span>        int bucketSize = Integer.parseInt(configuredBucketSizes[i].trim());<a name="line.636"></a>
-<span class="sourceLineNo">637</span>        if (bucketSize % 256 != 0) {<a name="line.637"></a>
-<span class="sourceLineNo">638</span>          // We need all the bucket sizes to be multiples of 256. Having all the configured bucket<a name="line.638"></a>
-<span class="sourceLineNo">639</span>          // sizes to be multiples of 256 will ensure that the block offsets within buckets,<a name="line.639"></a>
-<span class="sourceLineNo">640</span>          // that are calculated, will also be multiples of 256.<a name="line.640"></a>
-<span class="sourceLineNo">641</span>          // See BucketEntry where offset to each block is represented using 5 bytes (instead of 8<a name="line.641"></a>
-<span class="sourceLineNo">642</span>          // bytes long). We would like to save heap overhead as less as possible.<a name="line.642"></a>
-<span class="sourceLineNo">643</span>          throw new IllegalArgumentException("Illegal value: " + bucketSize + " configured for '"<a name="line.643"></a>
-<span class="sourceLineNo">644</span>              + BUCKET_CACHE_BUCKETS_KEY + "'. All bucket sizes to be multiples of 256");<a name="line.644"></a>
-<span class="sourceLineNo">645</span>        }<a name="line.645"></a>
-<span class="sourceLineNo">646</span>        bucketSizes[i] = bucketSize;<a name="line.646"></a>
-<span class="sourceLineNo">647</span>      }<a name="line.647"></a>
+<span class="sourceLineNo">561</span>    // Get the class from the config.<a name="line.561"></a>
+<span class="sourceLineNo">562</span>    try {<a name="line.562"></a>
+<span class="sourceLineNo">563</span>      klass = ExternalBlockCaches.valueOf(c.get(EXTERNAL_BLOCKCACHE_CLASS_KEY, "memcache")).clazz;<a name="line.563"></a>
+<span class="sourceLineNo">564</span>    } catch (IllegalArgumentException exception) {<a name="line.564"></a>
+<span class="sourceLineNo">565</span>      try {<a name="line.565"></a>
+<span class="sourceLineNo">566</span>        klass = c.getClass(EXTERNAL_BLOCKCACHE_CLASS_KEY, Class.forName(<a name="line.566"></a>
+<span class="sourceLineNo">567</span>            "org.apache.hadoop.hbase.io.hfile.MemcachedBlockCache"));<a name="line.567"></a>
+<span class="sourceLineNo">568</span>      } catch (ClassNotFoundException e) {<a name="line.568"></a>
+<span class="sourceLineNo">569</span>        return null;<a name="line.569"></a>
+<span class="sourceLineNo">570</span>      }<a name="line.570"></a>
+<span class="sourceLineNo">571</span>    }<a name="line.571"></a>
+<span class="sourceLineNo">572</span><a name="line.572"></a>
+<span class="sourceLineNo">573</span>    // Now try and create an instance of the block cache.<a name="line.573"></a>
+<span class="sourceLineNo">574</span>    try {<a name="line.574"></a>
+<span class="sourceLineNo">575</span>      LOG.info("Creating external block cache of type: " + klass);<a name="line.575"></a>
+<span class="sourceLineNo">576</span>      return (BlockCache) ReflectionUtils.newInstance(klass, c);<a name="line.576"></a>
+<span class="sourceLineNo">577</span>    } catch (Exception e) {<a name="line.577"></a>
+<span class="sourceLineNo">578</span>      LOG.warn("Error creating external block cache", e);<a name="line.578"></a>
+<span class="sourceLineNo">579</span>    }<a name="line.579"></a>
+<span class="sourceLineNo">580</span>    return null;<a name="line.580"></a>
+<span class="sourceLineNo">581</span><a name="line.581"></a>
+<span class="sourceLineNo">582</span>  }<a name="line.582"></a>
+<span class="sourceLineNo">583</span><a name="line.583"></a>
+<span class="sourceLineNo">584</span>  @VisibleForTesting<a name="line.584"></a>
+<span class="sourceLineNo">585</span>  static BucketCache getBucketCache(Configuration c) {<a name="line.585"></a>
+<span class="sourceLineNo">586</span>    // Check for L2.  ioengine name must be non-null.<a name="line.586"></a>
+<span class="sourceLineNo">587</span>    String bucketCacheIOEngineName = c.get(BUCKET_CACHE_IOENGINE_KEY, null);<a name="line.587"></a>
+<span class="sourceLineNo">588</span>    if (bucketCacheIOEngineName == null || bucketCacheIOEngineName.length() &lt;= 0) return null;<a name="line.588"></a>
+<span class="sourceLineNo">589</span><a name="line.589"></a>
+<span class="sourceLineNo">590</span>    int blockSize = c.getInt(BLOCKCACHE_BLOCKSIZE_KEY, HConstants.DEFAULT_BLOCKSIZE);<a name="line.590"></a>
+<span class="sourceLineNo">591</span>    final long bucketCacheSize = MemorySizeUtil.getBucketCacheSize(c);<a name="line.591"></a>
+<span class="sourceLineNo">592</span>    if (bucketCacheSize &lt;= 0) {<a name="line.592"></a>
+<span class="sourceLineNo">593</span>      throw new IllegalStateException("bucketCacheSize &lt;= 0; Check " +<a name="line.593"></a>
+<span class="sourceLineNo">594</span>        BUCKET_CACHE_SIZE_KEY + " setting and/or server java heap size");<a name="line.594"></a>
+<span class="sourceLineNo">595</span>    }<a name="line.595"></a>
+<span class="sourceLineNo">596</span>    if (c.get("hbase.bucketcache.percentage.in.combinedcache") != null) {<a name="line.596"></a>
+<span class="sourceLineNo">597</span>      LOG.warn("Configuration 'hbase.bucketcache.percentage.in.combinedcache' is no longer "<a name="line.597"></a>
+<span class="sourceLineNo">598</span>          + "respected. See comments in http://hbase.apache.org/book.html#_changes_of_note");<a name="line.598"></a>
+<span class="sourceLineNo">599</span>    }<a name="line.599"></a>
+<span class="sourceLineNo">600</span>    int writerThreads = c.getInt(BUCKET_CACHE_WRITER_THREADS_KEY,<a name="line.600"></a>
+<span class="sourceLineNo">601</span>      DEFAULT_BUCKET_CACHE_WRITER_THREADS);<a name="line.601"></a>
+<span class="sourceLineNo">602</span>    int writerQueueLen = c.getInt(BUCKET_CACHE_WRITER_QUEUE_KEY,<a name="line.602"></a>
+<span class="sourceLineNo">603</span>      DEFAULT_BUCKET_CACHE_WRITER_QUEUE);<a name="line.603"></a>
+<span class="sourceLineNo">604</span>    String persistentPath = c.get(BUCKET_CACHE_PERSISTENT_PATH_KEY);<a name="line.604"></a>
+<span class="sourceLineNo">605</span>    String[] configuredBucketSizes = c.getStrings(BUCKET_CACHE_BUCKETS_KEY);<a name="line.605"></a>
+<span class="sourceLineNo">606</span>    int [] bucketSizes = null;<a name="line.606"></a>
+<span class="sourceLineNo">607</span>    if (configuredBucketSizes != null) {<a name="line.607"></a>
+<span class="sourceLineNo">608</span>      bucketSizes = new int[configuredBucketSizes.length];<a name="line.608"></a>
+<span class="sourceLineNo">609</span>      for (int i = 0; i &lt; configuredBucketSizes.length; i++) {<a name="line.609"></a>
+<span class="sourceLineNo">610</span>        int bucketSize = Integer.parseInt(configuredBucketSizes[i].trim());<a name="line.610"></a>
+<span class="sourceLineNo">611</span>        if (bucketSize % 256 != 0) {<a name="line.611"></a>
+<span class="sourceLineNo">612</span>          // We need all the bucket sizes to be multiples of 256. Having all the configured bucket<a name="line.612"></a>
+<span class="sourceLineNo">613</span>          // sizes be multiples of 256 ensures that the block offsets calculated within buckets<a name="line.613"></a>
+<span class="sourceLineNo">614</span>          // will also be multiples of 256.<a name="line.614"></a>
+<span class="sourceLineNo">615</span>          // See BucketEntry, where the offset to each block is represented using 5 bytes (instead<a name="line.615"></a>
+<span class="sourceLineNo">616</span>          // of an 8-byte long). We would like to keep the heap overhead as small as possible.<a name="line.616"></a>
+<span class="sourceLineNo">617</span>          throw new IllegalArgumentException("Illegal value: " + bucketSize + " configured for '"<a name="line.617"></a>
+<span class="sourceLineNo">618</span>              + BUCKET_CACHE_BUCKETS_KEY + "'. All bucket sizes must be multiples of 256");<a name="line.618"></a>
+<span class="sourceLineNo">619</span>        }<a name="line.619"></a>
+<span class="sourceLineNo">620</span>        bucketSizes[i] = bucketSize;<a name="line.620"></a>
+<span class="sourceLineNo">621</span>      }<a name="line.621"></a>
+<span class="sourceLineNo">622</span>    }<a name="line.622"></a>
+<span class="sourceLineNo">623</span>    BucketCache bucketCache = null;<a name="line.623"></a>
+<span class="sourceLineNo">624</span>    try {<a name="line.624"></a>
+<span class="sourceLineNo">625</span>      int ioErrorsTolerationDuration = c.getInt(<a name="line.625"></a>
+<span class="sourceLineNo">626</span>        "hbase.bucketcache.ioengine.errors.tolerated.duration",<a name="line.626"></a>
+<span class="sourceLineNo">627</span>        BucketCache.DEFAULT_ERROR_TOLERATION_DURATION);<a name="line.627"></a>
+<span class="sourceLineNo">628</span>      // Bucket cache logs its stats on creation internal to the constructor.<a name="line.628"></a>
+<span class="sourceLineNo">629</span>      bucketCache = new BucketCache(bucketCacheIOEngineName,<a name="line.629"></a>
+<span class="sourceLineNo">630</span>        bucketCacheSize, blockSize, bucketSizes, writerThreads, writerQueueLen, persistentPath,<a name="line.630"></a>
+<span class="sourceLineNo">631</span>        ioErrorsTolerationDuration, c);<a name="line.631"></a>
+<span class="sourceLineNo">632</span>    } catch (IOException ioex) {<a name="line.632"></a>
+<span class="sourceLineNo">633</span>      LOG.error("Can't instantiate bucket cache", ioex); throw new RuntimeException(ioex);<a name="line.633"></a>
+<span class="sourceLineNo">634</span>    }<a name="line.634"></a>
+<span class="sourceLineNo">635</span>    return bucketCache;<a name="line.635"></a>
+<span class="sourceLineNo">636</span>  }<a name="line.636"></a>
+<span class="sourceLineNo">637</span><a name="line.637"></a>
+<span class="sourceLineNo">638</span>  /**<a name="line.638"></a>
+<span class="sourceLineNo">639</span>   * Returns the block cache or &lt;code&gt;null&lt;/code&gt; in case none should be used.<a name="line.639"></a>
+<span class="sourceLineNo">640</span>   * Sets GLOBAL_BLOCK_CACHE_INSTANCE<a name="line.640"></a>
+<span class="sourceLineNo">641</span>   *<a name="line.641"></a>
+<span class="sourceLineNo">642</span>   * @param conf  The current configuration.<a name="line.642"></a>
+<span class="sourceLineNo">643</span>   * @return The block cache or &lt;code&gt;null&lt;/code&gt;.<a name="line.643"></a>
+<span class="sourceLineNo">644</span>   */<a name="line.644"></a>
+<span class="sourceLineNo">645</span>  public static synchronized BlockCache instantiateBlockCache(Configuration conf) {<a name="line.645"></a>
+<span class="sourceLineNo">646</span>    if (GLOBAL_BLOCK_CACHE_INSTANCE != null) {<a name="line.646"></a>
+<span class="sourceLineNo">647</span>      return GLOBAL_BLOCK_CACHE_INSTANCE;<a name="line.647"></a>
 <span class="sourceLineNo">648</span>    }<a name="line.648"></a>
-<span class="sourceLineNo">649</span>    BucketCache bucketCache = null;<a name="line.649"></a>
-<span class="sourceLineNo">650</span>    try {<a name="line.650"></a>
-<span class="sourceLineNo">651</span>      int ioErrorsTolerationDuration = c.getInt(<a name="line.651"></a>
-<span class="sourceLineNo">652</span>        "hbase.bucketcache.ioengine.errors.tolerated.duration",<a name="line.652"></a>
-<span class="sourceLineNo">653</span>        BucketCache.DEFAULT_ERROR_TOLERATION_DURATION);<a name="line.653"></a>
-<span class="sourceLineNo">654</span>      // Bucket cache logs its stats on creation internal to the constructor.<a name="line.654"></a>
-<span class="sourceLineNo">655</span>      bucketCache = new BucketCache(bucketCacheIOEngineName,<a name="line.655"></a>
-<span class="sourceLineNo">656</span>        bucketCacheSize, blockSize, bucketSizes, writerThreads, writerQueueLen, persistentPath,<a name="line.656"></a>
-<span class="sourceLineNo">657</span>        ioErrorsTolerationDuration, c);<a name="line.657"></a>
-<span class="sourceLineNo">658</span>    } catch (IOException ioex) {<a name="line.658"></a>
-<span class="sourceLineNo">659</span>      LOG.error("Can't instantiate bucket cache", ioex); throw new RuntimeException(ioex);<a name="line.659"></a>
-<span class="sourceLineNo">660</span>    }<a name="line.660"></a>
-<span class="sourceLineNo">661</span>    return bucketCache;<a name="line.661"></a>
-<span class="sourceLineNo">662</span>  }<a name="line.662"></a>
-<span class="sourceLineNo">663</span><a name="line.663"></a>
-<span class="sourceLineNo">664</span>  /**<a name="line.664"></a>
-<span class="sourceLineNo">665</span>   * Returns the block cache or &lt;code&gt;null&lt;/code&gt; in case none should be used.<a name="line.665"></a>
-<span class="sourceLineNo">666</span>   * Sets GLOBAL_BLOCK_CACHE_INSTANCE<a name="line.666"></a>
-<span class="sourceLineNo">667</span>   *<a name="line.667"></a>
-<span class="sourceLineNo">668</span>   * @param conf  The current configuration.<a name="line.668"></a>
-<span class="sourceLineNo">669</span>   * @return The block cache or &lt;code&gt;null&lt;/code&gt;.<a name="line.669"></a>
-<span class="sourceLineNo">670</span>   */<a name="line.670"></a>
-<span class="sourceLineNo">671</span>  public static synchronized BlockCache instantiateBlockCache(Configuration conf) {<a name="line.671"></a>
-<span class="sourceLineNo">672</span>    if (GLOBAL_BLOCK_CACHE_INSTANCE != null) return GLOBAL_BLOCK_CACHE_INSTANCE;<a name="line.672"></a>
-<span class="sourceLineNo">673</span>    if (blockCacheDisabled) return null;<a name="line.673"></a>
-<span class="sourceLineNo">674</span>    LruBlockCache onHeapCache = getOnHeapCacheInternal(conf);<a name="line.674"></a>
-<span class="sourceLineNo">675</span>    // blockCacheDisabled is set as a side-effect of getL1Internal(), so check it again after the<a name="line.675"></a>
-<span class="sourceLineNo">676</span>    // call.<a name="line.676"></a>
-<span class="sourceLineNo">677</span>    if (blockCacheDisabled) return null;<a name="line.677"></a>
-<span class="sourceLineNo">678</span>    boolean useExternal = conf.getBoolean(EXTERNAL_BLOCKCACHE_KEY, EXTERNAL_BLOCKCACHE_DEFAULT);<a name="line.678"></a>
-<span class="sourceLineNo">679</span>    if (useExternal) {<a name="line.679"></a>
-<span class="sourceLineNo">680</span>      L2_CACHE_INSTANCE = getExternalBlockcache(conf);<a name="line.680"></a>
-<span class="sourceLineNo">681</span>      GLOBAL_BLOCK_CACHE_INSTANCE = L2_CACHE_INSTANCE == null ? onHeapCache<a name="line.681"></a>
-<span class="sourceLineNo">682</span>          : new InclusiveCombinedBlockCache(onHeapCache, L2_CACHE_INSTANCE);<a name="line.682"></a>
-<span class="sourceLineNo">683</span>    } else {<a name="line.683"></a>
-<span class="sourceLineNo">684</span>      // otherwise use the bucket cache.<a name="line.684"></a>
-<span class="sourceLineNo">685</span>      L2_CACHE_INSTANCE = getBucketCache(conf);<a name="line.685"></a>
-<span class="sourceLineNo">686</span>      if (!conf.getBoolean("hbase.bucketcache.combinedcache.enabled", true)) {<a name="line.686"></a>
-<span class="sourceLineNo">687</span>        // Non combined mode is off from 2.0<a name="line.687"></a>
-<span class="sourceLineNo">688</span>        LOG.warn(<a name="line.688"></a>
-<span class="sourceLineNo">689</span>            "From HBase 2.0 onwards only combined mode of LRU cache and bucket cache is available");<a name="line.689"></a>
-<span class="sourceLineNo">690</span>      }<a name="line.690"></a>
-<span class="sourceLineNo">691</span>      GLOBAL_BLOCK_CACHE_INSTANCE = L2_CACHE_INSTANCE == null ? onHeapCache<a name="line.691"></a>
-<span class="sourceLineNo">692</span>          : new CombinedBlockCache(onHeapCache, L2_CACHE_INSTANCE);<a name="line.692"></a>
-<span class="sourceLineNo">693</span>    }<a name="line.693"></a>
-<span class="sourceLineNo">694</span>    return GLOBAL_BLOCK_CACHE_INSTANCE;<a name="line.694"></a>
-<span class="sourceLineNo">695</span>  }<a name="line.695"></a>
-<span class="sourceLineNo">696</span><a name="line.696"></a>
-<span class="sourceLineNo">697</span>  // Supposed to use only from tests. Some tests want to reinit the Global block cache instance<a name="line.697"></a>
-<span class="sourceLineNo">698</span>  @VisibleForTesting<a name="line.698"></a>
-<span class="sourceLineNo">699</span>  static synchronized void clearGlobalInstances() {<a name="line.699"></a>
-<span class="sourceLineNo">700</span>    ONHEAP_CACHE_INSTANCE = null;<a name="line.700"></a>
-<span class="sourceLineNo">701</span>    L2_CACHE_INSTANCE = null;<a name="line.701"></a>
-<span class="sourceLineNo">702</span>    GLOBAL_BLOCK_CACHE_INSTANCE = null;<a name="line.702"></a>
-<span class="sourceLineNo">703</span>  }<a name="line.703"></a>
-<span class="sourceLineNo">704</span>}<a name="line.704"></a>
+<span class="sourceLineNo">649</span>    if (blockCacheDisabled) {<a name="line.649"></a>
+<span class="sourceLineNo">650</span>      return null;<a name="line.650"></a>
+<span class="sourceLineNo">651</span>    }<a name="line.651"></a>
+<span class="sourceLineNo">652</span>    LruBlockCache onHeapCache = getOnHeapCacheInternal(conf);<a name="line.652"></a>
+<span class="sourceLineNo">653</span>    // blockCacheDisabled is set as a side-effect of getOnHeapCacheInternal(), so check it again<a name="line.653"></a>
+<span class="sourceLineNo">654</span>    // after the call.<a name="line.654"></a>
+<span class="sourceLineNo">655</span>    if (blockCacheDisabled) {<a name="line.655"></a>
+<span class="sourceLineNo">656</span>      return null;<a name="line.656"></a>
+<span class="sourceLineNo">657</span>    }<a name="line.657"></a>
+<span class="sourceLineNo">658</span>    boolean useExternal = conf.getBoolean(EXTERNAL_BLOCKCACHE_KEY, EXTERNAL_BLOCKCACHE_DEFAULT);<a name="line.658"></a>
+<span class="sourceLineNo">659</span>    if (useExternal) {<a name="line.659"></a>
+<span class="sourceLineNo">660</span>      L2_CACHE_INSTANCE = getExternalBlockcache(conf);<a name="line.660"></a>
+<span class="sourceLineNo">661</span>      GLOBAL_BLOCK_CACHE_INSTANCE = L2_CACHE_INSTANCE == null ? onHeapCache<a name="line.661"></a>
+<span class="sourceLineNo">662</span>          : new InclusiveCombinedBlockCache(onHeapCache, L2_CACHE_INSTANCE);<a name="line.662"></a>
+<span class="sourceLineNo">663</span>    } else {<a name="line.663"></a>
+<span class="sourceLineNo">664</span>      // otherwise use the bucket cache.<a name="line.664"></a>
+<span class="sourceLineNo">665</span>      L2_CACHE_INSTANCE = getBucketCache(conf);<a name="line.665"></a>
+<span class="sourceLineNo">666</span>      if (!conf.getBoolean("hbase.bucketcache.combinedcache.enabled", true)) {<a name="line.666"></a>
+<span class="sourceLineNo">667</span>        // Non-combined mode is not supported from HBase 2.0 onwards.<a name="line.667"></a>
+<span class="sourceLineNo">668</span>        LOG.warn(<a name="line.668"></a>
+<span class="sourceLineNo">669</span>            "From HBase 2.0 onwards only combined mode of LRU cache and bucket cache is available");<a name="line.669"></a>
+<span class="sourceLineNo">670</span>      }<a name="line.670"></a>
+<span class="sourceLineNo">671</span>      GLOBAL_BLOCK_CACHE_INSTANCE = L2_CACHE_INSTANCE == null ? onHeapCache<a name="line.671"></a>
+<span class="sourceLineNo">672</span>          : new CombinedBlockCache(onHeapCache, L2_CACHE_INSTANCE);<a name="line.672"></a>
+<span class="sourceLineNo">673</span>    }<a name="line.673"></a>
+<span class="sourceLineNo">674</span>    return GLOBAL_BLOCK_CACHE_INSTANCE;<a name="line.674"></a>
+<span class="sourceLineNo">675</span>  }<a name="line.675"></a>
+<span class="sourceLineNo">676</span><a name="line.676"></a>
+<span class="sourceLineNo">677</span>  // Supposed to be used only from tests. Some tests want to reinit the global block cache instance.<a name="line.677"></a>
+<span class="sourceLineNo">678</span>  @VisibleForTesting<a name="line.678"></a>
+<span class="sourceLineNo">679</span>  static synchronized void clearGlobalInstances() {<a name="line.679"></a>
+<span class="sourceLineNo">680</span>    ONHEAP_CACHE_INSTANCE = null;<a name="line.680"></a>
+<span class="sourceLineNo">681</span>    L2_CACHE_INSTANCE = null;<a name="line.681"></a>
+<span class="sourceLineNo">682</span>    GLOBAL_BLOCK_CACHE_INSTANCE = null;<a name="line.682"></a>
+<span class="sourceLineNo">683</span>  }<a name="line.683"></a>
+<span class="sourceLineNo">684</span>}<a name="line.684"></a>
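The shouldReadBlockFromCache() and shouldLockOnCacheMiss() methods in the renumbered CacheConfig block above both reduce to one question: could a block of this type plausibly be in the cache under the current flags? If not, there is no point consulting the cache or serializing a miss behind a lock. A minimal standalone sketch of that decision follows; BlockKind and CacheFlags are illustrative placeholder names, not HBase classes, and the logic is a simplification of the methods shown above.

    // Illustrative sketch only; mirrors the decision shape of shouldReadBlockFromCache().
    enum BlockKind { DATA, META, INDEX, BLOOM }

    final class CacheFlags {
      final boolean blockCacheEnabled;
      final boolean cacheDataOnRead;
      final boolean cacheDataOnWrite;
      final boolean prefetchOnOpen;

      CacheFlags(boolean enabled, boolean onRead, boolean onWrite, boolean prefetch) {
        this.blockCacheEnabled = enabled;
        this.cacheDataOnRead = onRead;
        this.cacheDataOnWrite = onWrite;
        this.prefetchOnOpen = prefetch;
      }

      /** Could a block of this kind already be sitting in the cache? */
      boolean mayBeCached(BlockKind kind) {
        if (!blockCacheEnabled) {
          return false;
        }
        // Any of these settings may already have pushed data blocks into the cache.
        if (cacheDataOnRead || prefetchOnOpen || cacheDataOnWrite) {
          return true;
        }
        // Unknown block kind: be conservative and consult the cache.
        if (kind == null) {
          return true;
        }
        // Index and bloom blocks are cached on write regardless of the data-block settings.
        return kind == BlockKind.INDEX || kind == BlockKind.BLOOM;
      }
    }

shouldLockOnCacheMiss() applies the same idea on the miss path: when the block's category would not be cached on read anyway, there is nothing to gain from taking the per-block lock.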
 
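The bucket-size loop in getBucketCache() above rejects any configured size that is not a multiple of 256: the comment points at BucketEntry, which relies on that alignment to represent a block's offset in 5 bytes instead of an 8-byte long. A small self-contained sketch of the validation follows; the BucketSizes class and its message text are illustrative, not the HBase code itself.

    // Hypothetical helper mirroring the validation shown above; not HBase API.
    final class BucketSizes {
      static int[] parse(String[] configured) {
        if (configured == null) {
          return null; // the caller falls back to the default bucket sizes
        }
        int[] sizes = new int[configured.length];
        int i = 0;
        for (String value : configured) {
          int size = Integer.parseInt(value.trim());
          // Offsets inside a bucket stay 256-byte aligned, so every size must be a
          // multiple of 256.
          if (size % 256 != 0) {
            throw new IllegalArgumentException(
                "Illegal bucket size " + size + "; all bucket sizes must be multiples of 256");
          }
          sizes[i++] = size;
        }
        return sizes;
      }
    }

With BUCKET_CACHE_BUCKETS_KEY unset, the configured array is null and the BucketCache default bucket sizes apply, as in the code above.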
 
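instantiateBlockCache() above always builds the on-heap LruBlockCache as the L1 tier and then decides how to pair it with an optional L2: an external cache (for example memcached) yields an InclusiveCombinedBlockCache, a BucketCache yields a CombinedBlockCache, and with no L2 the LRU cache stands alone. The fragment below mirrors only that composition step; it is not HBase code, CacheWiringSketch and chooseGlobalCache are invented names, and it is placed in the same package as the classes it references.

    // Sketch of the composition step of instantiateBlockCache(); not HBase code.
    package org.apache.hadoop.hbase.io.hfile;

    final class CacheWiringSketch {
      /** L1 alone when no L2 exists; otherwise wrap both tiers in a combined cache. */
      static BlockCache chooseGlobalCache(LruBlockCache onHeap, BlockCache l2, boolean externalL2) {
        if (l2 == null) {
          return onHeap;
        }
        return externalL2
            ? new InclusiveCombinedBlockCache(onHeap, l2) // external L2, e.g. memcached
            : new CombinedBlockCache(onHeap, l2);         // BucketCache in combined mode
      }
    }

The non-combined BucketCache mode was removed in 2.0, which is why the code above only logs a warning when hbase.bucketcache.combinedcache.enabled is set to false.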
 


[03/26] hbase-site git commit: Published site at 6f15cecaed2f1f76bfe1880b7c578ed369daa5d5.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html
index 1077518..f3faf1a 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html
@@ -84,376 +84,377 @@
 <span class="sourceLineNo">076</span>  public static void setUp() throws Exception {<a name="line.76"></a>
 <span class="sourceLineNo">077</span>    // disable compactions in this test.<a name="line.77"></a>
 <span class="sourceLineNo">078</span>    TEST_UTIL.getConfiguration().setInt("hbase.hstore.compactionThreshold", 10000);<a name="line.78"></a>
-<span class="sourceLineNo">079</span>  }<a name="line.79"></a>
-<span class="sourceLineNo">080</span><a name="line.80"></a>
-<span class="sourceLineNo">081</span>  @AfterClass<a name="line.81"></a>
-<span class="sourceLineNo">082</span>  public static void tearDown() throws Exception {<a name="line.82"></a>
-<span class="sourceLineNo">083</span>    EnvironmentEdgeManagerTestHelper.reset();<a name="line.83"></a>
-<span class="sourceLineNo">084</span>  }<a name="line.84"></a>
-<span class="sourceLineNo">085</span><a name="line.85"></a>
-<span class="sourceLineNo">086</span>  /**<a name="line.86"></a>
-<span class="sourceLineNo">087</span>   * Callers must afterward call {@link HBaseTestingUtility#closeRegionAndWAL(HRegion)}<a name="line.87"></a>
-<span class="sourceLineNo">088</span>   * @param tableName<a name="line.88"></a>
-<span class="sourceLineNo">089</span>   * @param callingMethod<a name="line.89"></a>
-<span class="sourceLineNo">090</span>   * @param conf<a name="line.90"></a>
-<span class="sourceLineNo">091</span>   * @param family<a name="line.91"></a>
-<span class="sourceLineNo">092</span>   * @throws IOException<a name="line.92"></a>
-<span class="sourceLineNo">093</span>   * @return created and initialized region.<a name="line.93"></a>
-<span class="sourceLineNo">094</span>   */<a name="line.94"></a>
-<span class="sourceLineNo">095</span>  private HRegion initHRegion(byte[] tableName, String callingMethod,<a name="line.95"></a>
-<span class="sourceLineNo">096</span>      Configuration conf, String family) throws IOException {<a name="line.96"></a>
-<span class="sourceLineNo">097</span>    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(tableName));<a name="line.97"></a>
-<span class="sourceLineNo">098</span>    HColumnDescriptor familyDesc;<a name="line.98"></a>
-<span class="sourceLineNo">099</span>    for (int i = 0; i &lt; BLOOM_TYPE.length; i++) {<a name="line.99"></a>
-<span class="sourceLineNo">100</span>      BloomType bloomType = BLOOM_TYPE[i];<a name="line.100"></a>
-<span class="sourceLineNo">101</span>      familyDesc = new HColumnDescriptor(family + "_" + bloomType)<a name="line.101"></a>
-<span class="sourceLineNo">102</span>          .setBlocksize(1)<a name="line.102"></a>
-<span class="sourceLineNo">103</span>          .setBloomFilterType(BLOOM_TYPE[i]);<a name="line.103"></a>
-<span class="sourceLineNo">104</span>      htd.addFamily(familyDesc);<a name="line.104"></a>
-<span class="sourceLineNo">105</span>    }<a name="line.105"></a>
-<span class="sourceLineNo">106</span><a name="line.106"></a>
-<span class="sourceLineNo">107</span>    HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false);<a name="line.107"></a>
-<span class="sourceLineNo">108</span>    Path path = new Path(DIR + callingMethod);<a name="line.108"></a>
-<span class="sourceLineNo">109</span>    HRegion r = HBaseTestingUtility.createRegionAndWAL(info, path, conf, htd);<a name="line.109"></a>
-<span class="sourceLineNo">110</span>    blockCache = new CacheConfig(conf).getBlockCache();<a name="line.110"></a>
-<span class="sourceLineNo">111</span>    return r;<a name="line.111"></a>
-<span class="sourceLineNo">112</span>  }<a name="line.112"></a>
-<span class="sourceLineNo">113</span><a name="line.113"></a>
-<span class="sourceLineNo">114</span>  private void putData(String family, String row, String col, long version)<a name="line.114"></a>
-<span class="sourceLineNo">115</span>      throws IOException {<a name="line.115"></a>
-<span class="sourceLineNo">116</span>    for (int i = 0; i &lt; BLOOM_TYPE.length; i++) {<a name="line.116"></a>
-<span class="sourceLineNo">117</span>      putData(Bytes.toBytes(family + "_" + BLOOM_TYPE[i]), row, col, version,<a name="line.117"></a>
-<span class="sourceLineNo">118</span>          version);<a name="line.118"></a>
-<span class="sourceLineNo">119</span>    }<a name="line.119"></a>
-<span class="sourceLineNo">120</span>  }<a name="line.120"></a>
-<span class="sourceLineNo">121</span><a name="line.121"></a>
-<span class="sourceLineNo">122</span>  // generates a value to put for a row/col/version.<a name="line.122"></a>
-<span class="sourceLineNo">123</span>  private static byte[] genValue(String row, String col, long version) {<a name="line.123"></a>
-<span class="sourceLineNo">124</span>    return Bytes.toBytes("Value:" + row + "#" + col + "#" + version);<a name="line.124"></a>
-<span class="sourceLineNo">125</span>  }<a name="line.125"></a>
-<span class="sourceLineNo">126</span><a name="line.126"></a>
-<span class="sourceLineNo">127</span>  private void putData(byte[] cf, String row, String col, long versionStart,<a name="line.127"></a>
-<span class="sourceLineNo">128</span>      long versionEnd) throws IOException {<a name="line.128"></a>
-<span class="sourceLineNo">129</span>    byte columnBytes[] = Bytes.toBytes(col);<a name="line.129"></a>
-<span class="sourceLineNo">130</span>    Put put = new Put(Bytes.toBytes(row));<a name="line.130"></a>
-<span class="sourceLineNo">131</span>    put.setDurability(Durability.SKIP_WAL);<a name="line.131"></a>
-<span class="sourceLineNo">132</span><a name="line.132"></a>
-<span class="sourceLineNo">133</span>    for (long version = versionStart; version &lt;= versionEnd; version++) {<a name="line.133"></a>
-<span class="sourceLineNo">134</span>      put.addColumn(cf, columnBytes, version, genValue(row, col, version));<a name="line.134"></a>
-<span class="sourceLineNo">135</span>    }<a name="line.135"></a>
-<span class="sourceLineNo">136</span>    region.put(put);<a name="line.136"></a>
-<span class="sourceLineNo">137</span>  }<a name="line.137"></a>
-<span class="sourceLineNo">138</span><a name="line.138"></a>
-<span class="sourceLineNo">139</span>  private Cell[] getData(String family, String row, List&lt;String&gt; columns,<a name="line.139"></a>
-<span class="sourceLineNo">140</span>      int expBlocks) throws IOException {<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    return getData(family, row, columns, expBlocks, expBlocks, expBlocks);<a name="line.141"></a>
-<span class="sourceLineNo">142</span>  }<a name="line.142"></a>
-<span class="sourceLineNo">143</span><a name="line.143"></a>
-<span class="sourceLineNo">144</span>  private Cell[] getData(String family, String row, List&lt;String&gt; columns,<a name="line.144"></a>
-<span class="sourceLineNo">145</span>      int expBlocksRowCol, int expBlocksRow, int expBlocksNone)<a name="line.145"></a>
-<span class="sourceLineNo">146</span>      throws IOException {<a name="line.146"></a>
-<span class="sourceLineNo">147</span>    int[] expBlocks = new int[] { expBlocksRowCol, expBlocksRow, expBlocksNone };<a name="line.147"></a>
-<span class="sourceLineNo">148</span>    Cell[] kvs = null;<a name="line.148"></a>
-<span class="sourceLineNo">149</span><a name="line.149"></a>
-<span class="sourceLineNo">150</span>    for (int i = 0; i &lt; BLOOM_TYPE.length; i++) {<a name="line.150"></a>
-<span class="sourceLineNo">151</span>      BloomType bloomType = BLOOM_TYPE[i];<a name="line.151"></a>
-<span class="sourceLineNo">152</span>      byte[] cf = Bytes.toBytes(family + "_" + bloomType);<a name="line.152"></a>
-<span class="sourceLineNo">153</span>      long blocksStart = getBlkAccessCount(cf);<a name="line.153"></a>
-<span class="sourceLineNo">154</span>      Get get = new Get(Bytes.toBytes(row));<a name="line.154"></a>
-<span class="sourceLineNo">155</span><a name="line.155"></a>
-<span class="sourceLineNo">156</span>      for (String column : columns) {<a name="line.156"></a>
-<span class="sourceLineNo">157</span>        get.addColumn(cf, Bytes.toBytes(column));<a name="line.157"></a>
-<span class="sourceLineNo">158</span>      }<a name="line.158"></a>
-<span class="sourceLineNo">159</span><a name="line.159"></a>
-<span class="sourceLineNo">160</span>      kvs = region.get(get).rawCells();<a name="line.160"></a>
-<span class="sourceLineNo">161</span>      long blocksEnd = getBlkAccessCount(cf);<a name="line.161"></a>
-<span class="sourceLineNo">162</span>      if (expBlocks[i] != -1) {<a name="line.162"></a>
-<span class="sourceLineNo">163</span>        assertEquals("Blocks Read Check for Bloom: " + bloomType, expBlocks[i],<a name="line.163"></a>
-<span class="sourceLineNo">164</span>            blocksEnd - blocksStart);<a name="line.164"></a>
-<span class="sourceLineNo">165</span>      }<a name="line.165"></a>
-<span class="sourceLineNo">166</span>      System.out.println("Blocks Read for Bloom: " + bloomType + " = "<a name="line.166"></a>
-<span class="sourceLineNo">167</span>          + (blocksEnd - blocksStart) + "Expected = " + expBlocks[i]);<a name="line.167"></a>
-<span class="sourceLineNo">168</span>    }<a name="line.168"></a>
-<span class="sourceLineNo">169</span>    return kvs;<a name="line.169"></a>
-<span class="sourceLineNo">170</span>  }<a name="line.170"></a>
-<span class="sourceLineNo">171</span><a name="line.171"></a>
-<span class="sourceLineNo">172</span>  private Cell[] getData(String family, String row, String column,<a name="line.172"></a>
-<span class="sourceLineNo">173</span>      int expBlocks) throws IOException {<a name="line.173"></a>
-<span class="sourceLineNo">174</span>    return getData(family, row, Arrays.asList(column), expBlocks, expBlocks,<a name="line.174"></a>
-<span class="sourceLineNo">175</span>        expBlocks);<a name="line.175"></a>
-<span class="sourceLineNo">176</span>  }<a name="line.176"></a>
-<span class="sourceLineNo">177</span><a name="line.177"></a>
-<span class="sourceLineNo">178</span>  private Cell[] getData(String family, String row, String column,<a name="line.178"></a>
-<span class="sourceLineNo">179</span>      int expBlocksRowCol, int expBlocksRow, int expBlocksNone)<a name="line.179"></a>
-<span class="sourceLineNo">180</span>      throws IOException {<a name="line.180"></a>
-<span class="sourceLineNo">181</span>    return getData(family, row, Arrays.asList(column), expBlocksRowCol,<a name="line.181"></a>
-<span class="sourceLineNo">182</span>        expBlocksRow, expBlocksNone);<a name="line.182"></a>
-<span class="sourceLineNo">183</span>  }<a name="line.183"></a>
-<span class="sourceLineNo">184</span><a name="line.184"></a>
-<span class="sourceLineNo">185</span>  private void deleteFamily(String family, String row, long version)<a name="line.185"></a>
-<span class="sourceLineNo">186</span>      throws IOException {<a name="line.186"></a>
-<span class="sourceLineNo">187</span>    Delete del = new Delete(Bytes.toBytes(row));<a name="line.187"></a>
-<span class="sourceLineNo">188</span>    del.addFamily(Bytes.toBytes(family + "_ROWCOL"), version);<a name="line.188"></a>
-<span class="sourceLineNo">189</span>    del.addFamily(Bytes.toBytes(family + "_ROW"), version);<a name="line.189"></a>
-<span class="sourceLineNo">190</span>    del.addFamily(Bytes.toBytes(family + "_NONE"), version);<a name="line.190"></a>
-<span class="sourceLineNo">191</span>    region.delete(del);<a name="line.191"></a>
-<span class="sourceLineNo">192</span>  }<a name="line.192"></a>
-<span class="sourceLineNo">193</span><a name="line.193"></a>
-<span class="sourceLineNo">194</span>  private static void verifyData(Cell kv, String expectedRow,<a name="line.194"></a>
-<span class="sourceLineNo">195</span>      String expectedCol, long expectedVersion) {<a name="line.195"></a>
-<span class="sourceLineNo">196</span>    assertTrue("RowCheck", CellUtil.matchingRows(kv,  Bytes.toBytes(expectedRow)));<a name="line.196"></a>
-<span class="sourceLineNo">197</span>    assertTrue("ColumnCheck", CellUtil.matchingQualifier(kv, Bytes.toBytes(expectedCol)));<a name="line.197"></a>
-<span class="sourceLineNo">198</span>    assertEquals("TSCheck", expectedVersion, kv.getTimestamp());<a name="line.198"></a>
-<span class="sourceLineNo">199</span>    assertTrue("ValueCheck", CellUtil.matchingValue(kv, genValue(expectedRow, expectedCol, expectedVersion)));<a name="line.199"></a>
-<span class="sourceLineNo">200</span>  }<a name="line.200"></a>
-<span class="sourceLineNo">201</span><a name="line.201"></a>
-<span class="sourceLineNo">202</span>  private static long getBlkAccessCount(byte[] cf) {<a name="line.202"></a>
-<span class="sourceLineNo">203</span>      return HFile.DATABLOCK_READ_COUNT.sum();<a name="line.203"></a>
-<span class="sourceLineNo">204</span>  }<a name="line.204"></a>
-<span class="sourceLineNo">205</span><a name="line.205"></a>
-<span class="sourceLineNo">206</span>  private static long getBlkCount() {<a name="line.206"></a>
-<span class="sourceLineNo">207</span>    return blockCache.getBlockCount();<a name="line.207"></a>
-<span class="sourceLineNo">208</span>  }<a name="line.208"></a>
-<span class="sourceLineNo">209</span><a name="line.209"></a>
-<span class="sourceLineNo">210</span>  /**<a name="line.210"></a>
-<span class="sourceLineNo">211</span>   * Test # of blocks read for some simple seek cases.<a name="line.211"></a>
-<span class="sourceLineNo">212</span>   *<a name="line.212"></a>
-<span class="sourceLineNo">213</span>   * @throws Exception<a name="line.213"></a>
-<span class="sourceLineNo">214</span>   */<a name="line.214"></a>
-<span class="sourceLineNo">215</span>  @Test<a name="line.215"></a>
-<span class="sourceLineNo">216</span>  public void testBlocksRead() throws Exception {<a name="line.216"></a>
-<span class="sourceLineNo">217</span>    byte[] TABLE = Bytes.toBytes("testBlocksRead");<a name="line.217"></a>
-<span class="sourceLineNo">218</span>    String FAMILY = "cf1";<a name="line.218"></a>
-<span class="sourceLineNo">219</span>    Cell kvs[];<a name="line.219"></a>
-<span class="sourceLineNo">220</span>    this.region = initHRegion(TABLE, testName.getMethodName(), conf, FAMILY);<a name="line.220"></a>
-<span class="sourceLineNo">221</span><a name="line.221"></a>
-<span class="sourceLineNo">222</span>    try {<a name="line.222"></a>
-<span class="sourceLineNo">223</span>      putData(FAMILY, "row", "col1", 1);<a name="line.223"></a>
-<span class="sourceLineNo">224</span>      putData(FAMILY, "row", "col2", 2);<a name="line.224"></a>
-<span class="sourceLineNo">225</span>      putData(FAMILY, "row", "col3", 3);<a name="line.225"></a>
-<span class="sourceLineNo">226</span>      putData(FAMILY, "row", "col4", 4);<a name="line.226"></a>
-<span class="sourceLineNo">227</span>      putData(FAMILY, "row", "col5", 5);<a name="line.227"></a>
-<span class="sourceLineNo">228</span>      putData(FAMILY, "row", "col6", 6);<a name="line.228"></a>
-<span class="sourceLineNo">229</span>      putData(FAMILY, "row", "col7", 7);<a name="line.229"></a>
-<span class="sourceLineNo">230</span>      region.flush(true);<a name="line.230"></a>
-<span class="sourceLineNo">231</span><a name="line.231"></a>
-<span class="sourceLineNo">232</span>      // Expected block reads: 1<a name="line.232"></a>
-<span class="sourceLineNo">233</span>      // The top block has the KV we are<a name="line.233"></a>
-<span class="sourceLineNo">234</span>      // interested. So only 1 seek is needed.<a name="line.234"></a>
-<span class="sourceLineNo">235</span>      kvs = getData(FAMILY, "row", "col1", 1);<a name="line.235"></a>
-<span class="sourceLineNo">236</span>      assertEquals(1, kvs.length);<a name="line.236"></a>
-<span class="sourceLineNo">237</span>      verifyData(kvs[0], "row", "col1", 1);<a name="line.237"></a>
-<span class="sourceLineNo">238</span><a name="line.238"></a>
-<span class="sourceLineNo">239</span>      // Expected block reads: 2<a name="line.239"></a>
-<span class="sourceLineNo">240</span>      // The top block and next block has the KVs we are<a name="line.240"></a>
-<span class="sourceLineNo">241</span>      // interested. So only 2 seek is needed.<a name="line.241"></a>
-<span class="sourceLineNo">242</span>      kvs = getData(FAMILY, "row", Arrays.asList("col1", "col2"), 2);<a name="line.242"></a>
-<span class="sourceLineNo">243</span>      assertEquals(2, kvs.length);<a name="line.243"></a>
-<span class="sourceLineNo">244</span>      verifyData(kvs[0], "row", "col1", 1);<a name="line.244"></a>
-<span class="sourceLineNo">245</span>      verifyData(kvs[1], "row", "col2", 2);<a name="line.245"></a>
-<span class="sourceLineNo">246</span><a name="line.246"></a>
-<span class="sourceLineNo">247</span>      // Expected block reads: 3<a name="line.247"></a>
-<span class="sourceLineNo">248</span>      // The first 2 seeks is to find out col2. [HBASE-4443]<a name="line.248"></a>
-<span class="sourceLineNo">249</span>      // One additional seek for col3<a name="line.249"></a>
-<span class="sourceLineNo">250</span>      // So 3 seeks are needed.<a name="line.250"></a>
-<span class="sourceLineNo">251</span>      kvs = getData(FAMILY, "row", Arrays.asList("col2", "col3"), 2);<a name="line.251"></a>
-<span class="sourceLineNo">252</span>      assertEquals(2, kvs.length);<a name="line.252"></a>
-<span class="sourceLineNo">253</span>      verifyData(kvs[0], "row", "col2", 2);<a name="line.253"></a>
-<span class="sourceLineNo">254</span>      verifyData(kvs[1], "row", "col3", 3);<a name="line.254"></a>
-<span class="sourceLineNo">255</span><a name="line.255"></a>
-<span class="sourceLineNo">256</span>      // Expected block reads: 1. [HBASE-4443]&amp;[HBASE-7845]<a name="line.256"></a>
-<span class="sourceLineNo">257</span>      kvs = getData(FAMILY, "row", Arrays.asList("col5"), 1);<a name="line.257"></a>
-<span class="sourceLineNo">258</span>      assertEquals(1, kvs.length);<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      verifyData(kvs[0], "row", "col5", 5);<a name="line.259"></a>
-<span class="sourceLineNo">260</span>    } finally {<a name="line.260"></a>
-<span class="sourceLineNo">261</span>      HBaseTestingUtility.closeRegionAndWAL(this.region);<a name="line.261"></a>
-<span class="sourceLineNo">262</span>      this.region = null;<a name="line.262"></a>
-<span class="sourceLineNo">263</span>    }<a name="line.263"></a>
-<span class="sourceLineNo">264</span>  }<a name="line.264"></a>
-<span class="sourceLineNo">265</span><a name="line.265"></a>
-<span class="sourceLineNo">266</span>  /**<a name="line.266"></a>
-<span class="sourceLineNo">267</span>   * Test # of blocks read (targeted at some of the cases Lazy Seek optimizes).<a name="line.267"></a>
-<span class="sourceLineNo">268</span>   *<a name="line.268"></a>
-<span class="sourceLineNo">269</span>   * @throws Exception<a name="line.269"></a>
-<span class="sourceLineNo">270</span>   */<a name="line.270"></a>
-<span class="sourceLineNo">271</span>  @Test<a name="line.271"></a>
-<span class="sourceLineNo">272</span>  public void testLazySeekBlocksRead() throws Exception {<a name="line.272"></a>
-<span class="sourceLineNo">273</span>    byte[] TABLE = Bytes.toBytes("testLazySeekBlocksRead");<a name="line.273"></a>
-<span class="sourceLineNo">274</span>    String FAMILY = "cf1";<a name="line.274"></a>
-<span class="sourceLineNo">275</span>    Cell kvs[];<a name="line.275"></a>
-<span class="sourceLineNo">276</span>    this.region = initHRegion(TABLE, testName.getMethodName(), conf, FAMILY);<a name="line.276"></a>
-<span class="sourceLineNo">277</span><a name="line.277"></a>
-<span class="sourceLineNo">278</span>    try {<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      // File 1<a name="line.279"></a>
-<span class="sourceLineNo">280</span>      putData(FAMILY, "row", "col1", 1);<a name="line.280"></a>
-<span class="sourceLineNo">281</span>      putData(FAMILY, "row", "col2", 2);<a name="line.281"></a>
-<span class="sourceLineNo">282</span>      region.flush(true);<a name="line.282"></a>
-<span class="sourceLineNo">283</span><a name="line.283"></a>
-<span class="sourceLineNo">284</span>      // File 2<a name="line.284"></a>
-<span class="sourceLineNo">285</span>      putData(FAMILY, "row", "col1", 3);<a name="line.285"></a>
-<span class="sourceLineNo">286</span>      putData(FAMILY, "row", "col2", 4);<a name="line.286"></a>
-<span class="sourceLineNo">287</span>      region.flush(true);<a name="line.287"></a>
-<span class="sourceLineNo">288</span><a name="line.288"></a>
-<span class="sourceLineNo">289</span>      // Expected blocks read: 1.<a name="line.289"></a>
-<span class="sourceLineNo">290</span>      // File 2's top block is also the KV we are<a name="line.290"></a>
-<span class="sourceLineNo">291</span>      // interested. So only 1 seek is needed.<a name="line.291"></a>
-<span class="sourceLineNo">292</span>      kvs = getData(FAMILY, "row", Arrays.asList("col1"), 1);<a name="line.292"></a>
-<span class="sourceLineNo">293</span>      assertEquals(1, kvs.length);<a name="line.293"></a>
-<span class="sourceLineNo">294</span>      verifyData(kvs[0], "row", "col1", 3);<a name="line.294"></a>
-<span class="sourceLineNo">295</span><a name="line.295"></a>
-<span class="sourceLineNo">296</span>      // Expected blocks read: 2<a name="line.296"></a>
-<span class="sourceLineNo">297</span>      // File 2's top block has the "col1" KV we are<a name="line.297"></a>
-<span class="sourceLineNo">298</span>      // interested. We also need "col2" which is in a block<a name="line.298"></a>
-<span class="sourceLineNo">299</span>      // of its own. So, we need that block as well.<a name="line.299"></a>
-<span class="sourceLineNo">300</span>      kvs = getData(FAMILY, "row", Arrays.asList("col1", "col2"), 2);<a name="line.300"></a>
-<span class="sourceLineNo">301</span>      assertEquals(2, kvs.length);<a name="line.301"></a>
-<span class="sourceLineNo">302</span>      verifyData(kvs[0], "row", "col1", 3);<a name="line.302"></a>
-<span class="sourceLineNo">303</span>      verifyData(kvs[1], "row", "col2", 4);<a name="line.303"></a>
-<span class="sourceLineNo">304</span><a name="line.304"></a>
-<span class="sourceLineNo">305</span>      // File 3: Add another column<a name="line.305"></a>
-<span class="sourceLineNo">306</span>      putData(FAMILY, "row", "col3", 5);<a name="line.306"></a>
-<span class="sourceLineNo">307</span>      region.flush(true);<a name="line.307"></a>
-<span class="sourceLineNo">308</span><a name="line.308"></a>
-<span class="sourceLineNo">309</span>      // Expected blocks read: 1<a name="line.309"></a>
-<span class="sourceLineNo">310</span>      // File 3's top block has the "col3" KV we are<a name="line.310"></a>
-<span class="sourceLineNo">311</span>      // interested. So only 1 seek is needed.<a name="line.311"></a>
-<span class="sourceLineNo">312</span>      kvs = getData(FAMILY, "row", "col3", 1);<a name="line.312"></a>
-<span class="sourceLineNo">313</span>      assertEquals(1, kvs.length);<a name="line.313"></a>
-<span class="sourceLineNo">314</span>      verifyData(kvs[0], "row", "col3", 5);<a name="line.314"></a>
-<span class="sourceLineNo">315</span><a name="line.315"></a>
-<span class="sourceLineNo">316</span>      // Get a column from older file.<a name="line.316"></a>
-<span class="sourceLineNo">317</span>      // For ROWCOL Bloom filter: Expected blocks read: 1.<a name="line.317"></a>
-<span class="sourceLineNo">318</span>      // For ROW Bloom filter: Expected blocks read: 2.<a name="line.318"></a>
-<span class="sourceLineNo">319</span>      // For NONE Bloom filter: Expected blocks read: 2.<a name="line.319"></a>
-<span class="sourceLineNo">320</span>      kvs = getData(FAMILY, "row", Arrays.asList("col1"), 1, 2, 2);<a name="line.320"></a>
-<span class="sourceLineNo">321</span>      assertEquals(1, kvs.length);<a name="line.321"></a>
-<span class="sourceLineNo">322</span>      verifyData(kvs[0], "row", "col1", 3);<a name="line.322"></a>
-<span class="sourceLineNo">323</span><a name="line.323"></a>
-<span class="sourceLineNo">324</span>      // File 4: Delete the entire row.<a name="line.324"></a>
-<span class="sourceLineNo">325</span>      deleteFamily(FAMILY, "row", 6);<a name="line.325"></a>
-<span class="sourceLineNo">326</span>      region.flush(true);<a name="line.326"></a>
-<span class="sourceLineNo">327</span><a name="line.327"></a>
-<span class="sourceLineNo">328</span>      // For ROWCOL Bloom filter: Expected blocks read: 2.<a name="line.328"></a>
-<span class="sourceLineNo">329</span>      // For ROW Bloom filter: Expected blocks read: 3.<a name="line.329"></a>
-<span class="sourceLineNo">330</span>      // For NONE Bloom filter: Expected blocks read: 3.<a name="line.330"></a>
-<span class="sourceLineNo">331</span>      kvs = getData(FAMILY, "row", "col1", 2, 3, 3);<a name="line.331"></a>
-<span class="sourceLineNo">332</span>      assertEquals(0, kvs.length);<a name="line.332"></a>
-<span class="sourceLineNo">333</span>      kvs = getData(FAMILY, "row", "col2", 2, 3, 3);<a name="line.333"></a>
-<span class="sourceLineNo">334</span>      assertEquals(0, kvs.length);<a name="line.334"></a>
-<span class="sourceLineNo">335</span>      kvs = getData(FAMILY, "row", "col3", 2);<a name="line.335"></a>
-<span class="sourceLineNo">336</span>      assertEquals(0, kvs.length);<a name="line.336"></a>
-<span class="sourceLineNo">337</span>      kvs = getData(FAMILY, "row", Arrays.asList("col1", "col2", "col3"), 4);<a name="line.337"></a>
-<span class="sourceLineNo">338</span>      assertEquals(0, kvs.length);<a name="line.338"></a>
-<span class="sourceLineNo">339</span><a name="line.339"></a>
-<span class="sourceLineNo">340</span>      // File 5: Delete<a name="line.340"></a>
-<span class="sourceLineNo">341</span>      deleteFamily(FAMILY, "row", 10);<a name="line.341"></a>
-<span class="sourceLineNo">342</span>      region.flush(true);<a name="line.342"></a>
-<span class="sourceLineNo">343</span><a name="line.343"></a>
-<span class="sourceLineNo">344</span>      // File 6: some more puts, but with timestamps older than the<a name="line.344"></a>
-<span class="sourceLineNo">345</span>      // previous delete.<a name="line.345"></a>
-<span class="sourceLineNo">346</span>      putData(FAMILY, "row", "col1", 7);<a name="line.346"></a>
-<span class="sourceLineNo">347</span>      putData(FAMILY, "row", "col2", 8);<a name="line.347"></a>
-<span class="sourceLineNo">348</span>      putData(FAMILY, "row", "col3", 9);<a name="line.348"></a>
-<span class="sourceLineNo">349</span>      region.flush(true);<a name="line.349"></a>
-<span class="sourceLineNo">350</span><a name="line.350"></a>
-<span class="sourceLineNo">351</span>      // Baseline expected blocks read: 6. [HBASE-4532]<a name="line.351"></a>
-<span class="sourceLineNo">352</span>      kvs = getData(FAMILY, "row", Arrays.asList("col1", "col2", "col3"), 6, 7, 7);<a name="line.352"></a>
-<span class="sourceLineNo">353</span>      assertEquals(0, kvs.length);<a name="line.353"></a>
-<span class="sourceLineNo">354</span><a name="line.354"></a>
-<span class="sourceLineNo">355</span>      // File 7: Put back new data<a name="line.355"></a>
-<span class="sourceLineNo">356</span>      putData(FAMILY, "row", "col1", 11);<a name="line.356"></a>
-<span class="sourceLineNo">357</span>      putData(FAMILY, "row", "col2", 12);<a name="line.357"></a>
-<span class="sourceLineNo">358</span>      putData(FAMILY, "row", "col3", 13);<a name="line.358"></a>
-<span class="sourceLineNo">359</span>      region.flush(true);<a name="line.359"></a>
-<span class="sourceLineNo">360</span><a name="line.360"></a>
+<span class="sourceLineNo">079</span>    CacheConfig.instantiateBlockCache(TEST_UTIL.getConfiguration());<a name="line.79"></a>
+<span class="sourceLineNo">080</span>  }<a name="line.80"></a>
+<span class="sourceLineNo">081</span><a name="line.81"></a>
+<span class="sourceLineNo">082</span>  @AfterClass<a name="line.82"></a>
+<span class="sourceLineNo">083</span>  public static void tearDown() throws Exception {<a name="line.83"></a>
+<span class="sourceLineNo">084</span>    EnvironmentEdgeManagerTestHelper.reset();<a name="line.84"></a>
+<span class="sourceLineNo">085</span>  }<a name="line.85"></a>
+<span class="sourceLineNo">086</span><a name="line.86"></a>
+<span class="sourceLineNo">087</span>  /**<a name="line.87"></a>
+<span class="sourceLineNo">088</span>   * Callers must afterward call {@link HBaseTestingUtility#closeRegionAndWAL(HRegion)}<a name="line.88"></a>
+<span class="sourceLineNo">089</span>   * @param tableName<a name="line.89"></a>
+<span class="sourceLineNo">090</span>   * @param callingMethod<a name="line.90"></a>
+<span class="sourceLineNo">091</span>   * @param conf<a name="line.91"></a>
+<span class="sourceLineNo">092</span>   * @param family<a name="line.92"></a>
+<span class="sourceLineNo">093</span>   * @throws IOException<a name="line.93"></a>
+<span class="sourceLineNo">094</span>   * @return created and initialized region.<a name="line.94"></a>
+<span class="sourceLineNo">095</span>   */<a name="line.95"></a>
+<span class="sourceLineNo">096</span>  private HRegion initHRegion(byte[] tableName, String callingMethod,<a name="line.96"></a>
+<span class="sourceLineNo">097</span>      Configuration conf, String family) throws IOException {<a name="line.97"></a>
+<span class="sourceLineNo">098</span>    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(tableName));<a name="line.98"></a>
+<span class="sourceLineNo">099</span>    HColumnDescriptor familyDesc;<a name="line.99"></a>
+<span class="sourceLineNo">100</span>    for (int i = 0; i &lt; BLOOM_TYPE.length; i++) {<a name="line.100"></a>
+<span class="sourceLineNo">101</span>      BloomType bloomType = BLOOM_TYPE[i];<a name="line.101"></a>
+<span class="sourceLineNo">102</span>      familyDesc = new HColumnDescriptor(family + "_" + bloomType)<a name="line.102"></a>
+<span class="sourceLineNo">103</span>          .setBlocksize(1)<a name="line.103"></a>
+<span class="sourceLineNo">104</span>          .setBloomFilterType(BLOOM_TYPE[i]);<a name="line.104"></a>
+<span class="sourceLineNo">105</span>      htd.addFamily(familyDesc);<a name="line.105"></a>
+<span class="sourceLineNo">106</span>    }<a name="line.106"></a>
+<span class="sourceLineNo">107</span><a name="line.107"></a>
+<span class="sourceLineNo">108</span>    HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false);<a name="line.108"></a>
+<span class="sourceLineNo">109</span>    Path path = new Path(DIR + callingMethod);<a name="line.109"></a>
+<span class="sourceLineNo">110</span>    HRegion r = HBaseTestingUtility.createRegionAndWAL(info, path, conf, htd);<a name="line.110"></a>
+<span class="sourceLineNo">111</span>    blockCache = new CacheConfig(conf).getBlockCache();<a name="line.111"></a>
+<span class="sourceLineNo">112</span>    return r;<a name="line.112"></a>
+<span class="sourceLineNo">113</span>  }<a name="line.113"></a>
+<span class="sourceLineNo">114</span><a name="line.114"></a>
+<span class="sourceLineNo">115</span>  private void putData(String family, String row, String col, long version)<a name="line.115"></a>
+<span class="sourceLineNo">116</span>      throws IOException {<a name="line.116"></a>
+<span class="sourceLineNo">117</span>    for (int i = 0; i &lt; BLOOM_TYPE.length; i++) {<a name="line.117"></a>
+<span class="sourceLineNo">118</span>      putData(Bytes.toBytes(family + "_" + BLOOM_TYPE[i]), row, col, version,<a name="line.118"></a>
+<span class="sourceLineNo">119</span>          version);<a name="line.119"></a>
+<span class="sourceLineNo">120</span>    }<a name="line.120"></a>
+<span class="sourceLineNo">121</span>  }<a name="line.121"></a>
+<span class="sourceLineNo">122</span><a name="line.122"></a>
+<span class="sourceLineNo">123</span>  // generates a value to put for a row/col/version.<a name="line.123"></a>
+<span class="sourceLineNo">124</span>  private static byte[] genValue(String row, String col, long version) {<a name="line.124"></a>
+<span class="sourceLineNo">125</span>    return Bytes.toBytes("Value:" + row + "#" + col + "#" + version);<a name="line.125"></a>
+<span class="sourceLineNo">126</span>  }<a name="line.126"></a>
+<span class="sourceLineNo">127</span><a name="line.127"></a>
+<span class="sourceLineNo">128</span>  private void putData(byte[] cf, String row, String col, long versionStart,<a name="line.128"></a>
+<span class="sourceLineNo">129</span>      long versionEnd) throws IOException {<a name="line.129"></a>
+<span class="sourceLineNo">130</span>    byte columnBytes[] = Bytes.toBytes(col);<a name="line.130"></a>
+<span class="sourceLineNo">131</span>    Put put = new Put(Bytes.toBytes(row));<a name="line.131"></a>
+<span class="sourceLineNo">132</span>    put.setDurability(Durability.SKIP_WAL);<a name="line.132"></a>
+<span class="sourceLineNo">133</span><a name="line.133"></a>
+<span class="sourceLineNo">134</span>    for (long version = versionStart; version &lt;= versionEnd; version++) {<a name="line.134"></a>
+<span class="sourceLineNo">135</span>      put.addColumn(cf, columnBytes, version, genValue(row, col, version));<a name="line.135"></a>
+<span class="sourceLineNo">136</span>    }<a name="line.136"></a>
+<span class="sourceLineNo">137</span>    region.put(put);<a name="line.137"></a>
+<span class="sourceLineNo">138</span>  }<a name="line.138"></a>
+<span class="sourceLineNo">139</span><a name="line.139"></a>
+<span class="sourceLineNo">140</span>  private Cell[] getData(String family, String row, List&lt;String&gt; columns,<a name="line.140"></a>
+<span class="sourceLineNo">141</span>      int expBlocks) throws IOException {<a name="line.141"></a>
+<span class="sourceLineNo">142</span>    return getData(family, row, columns, expBlocks, expBlocks, expBlocks);<a name="line.142"></a>
+<span class="sourceLineNo">143</span>  }<a name="line.143"></a>
+<span class="sourceLineNo">144</span><a name="line.144"></a>
+<span class="sourceLineNo">145</span>  private Cell[] getData(String family, String row, List&lt;String&gt; columns,<a name="line.145"></a>
+<span class="sourceLineNo">146</span>      int expBlocksRowCol, int expBlocksRow, int expBlocksNone)<a name="line.146"></a>
+<span class="sourceLineNo">147</span>      throws IOException {<a name="line.147"></a>
+<span class="sourceLineNo">148</span>    int[] expBlocks = new int[] { expBlocksRowCol, expBlocksRow, expBlocksNone };<a name="line.148"></a>
+<span class="sourceLineNo">149</span>    Cell[] kvs = null;<a name="line.149"></a>
+<span class="sourceLineNo">150</span><a name="line.150"></a>
+<span class="sourceLineNo">151</span>    for (int i = 0; i &lt; BLOOM_TYPE.length; i++) {<a name="line.151"></a>
+<span class="sourceLineNo">152</span>      BloomType bloomType = BLOOM_TYPE[i];<a name="line.152"></a>
+<span class="sourceLineNo">153</span>      byte[] cf = Bytes.toBytes(family + "_" + bloomType);<a name="line.153"></a>
+<span class="sourceLineNo">154</span>      long blocksStart = getBlkAccessCount(cf);<a name="line.154"></a>
+<span class="sourceLineNo">155</span>      Get get = new Get(Bytes.toBytes(row));<a name="line.155"></a>
+<span class="sourceLineNo">156</span><a name="line.156"></a>
+<span class="sourceLineNo">157</span>      for (String column : columns) {<a name="line.157"></a>
+<span class="sourceLineNo">158</span>        get.addColumn(cf, Bytes.toBytes(column));<a name="line.158"></a>
+<span class="sourceLineNo">159</span>      }<a name="line.159"></a>
+<span class="sourceLineNo">160</span><a name="line.160"></a>
+<span class="sourceLineNo">161</span>      kvs = region.get(get).rawCells();<a name="line.161"></a>
+<span class="sourceLineNo">162</span>      long blocksEnd = getBlkAccessCount(cf);<a name="line.162"></a>
+<span class="sourceLineNo">163</span>      if (expBlocks[i] != -1) {<a name="line.163"></a>
+<span class="sourceLineNo">164</span>        assertEquals("Blocks Read Check for Bloom: " + bloomType, expBlocks[i],<a name="line.164"></a>
+<span class="sourceLineNo">165</span>            blocksEnd - blocksStart);<a name="line.165"></a>
+<span class="sourceLineNo">166</span>      }<a name="line.166"></a>
+<span class="sourceLineNo">167</span>      System.out.println("Blocks Read for Bloom: " + bloomType + " = "<a name="line.167"></a>
+<span class="sourceLineNo">168</span>          + (blocksEnd - blocksStart) + "Expected = " + expBlocks[i]);<a name="line.168"></a>
+<span class="sourceLineNo">169</span>    }<a name="line.169"></a>
+<span class="sourceLineNo">170</span>    return kvs;<a name="line.170"></a>
+<span class="sourceLineNo">171</span>  }<a name="line.171"></a>
+<span class="sourceLineNo">172</span><a name="line.172"></a>
+<span class="sourceLineNo">173</span>  private Cell[] getData(String family, String row, String column,<a name="line.173"></a>
+<span class="sourceLineNo">174</span>      int expBlocks) throws IOException {<a name="line.174"></a>
+<span class="sourceLineNo">175</span>    return getData(family, row, Arrays.asList(column), expBlocks, expBlocks,<a name="line.175"></a>
+<span class="sourceLineNo">176</span>        expBlocks);<a name="line.176"></a>
+<span class="sourceLineNo">177</span>  }<a name="line.177"></a>
+<span class="sourceLineNo">178</span><a name="line.178"></a>
+<span class="sourceLineNo">179</span>  private Cell[] getData(String family, String row, String column,<a name="line.179"></a>
+<span class="sourceLineNo">180</span>      int expBlocksRowCol, int expBlocksRow, int expBlocksNone)<a name="line.180"></a>
+<span class="sourceLineNo">181</span>      throws IOException {<a name="line.181"></a>
+<span class="sourceLineNo">182</span>    return getData(family, row, Arrays.asList(column), expBlocksRowCol,<a name="line.182"></a>
+<span class="sourceLineNo">183</span>        expBlocksRow, expBlocksNone);<a name="line.183"></a>
+<span class="sourceLineNo">184</span>  }<a name="line.184"></a>
+<span class="sourceLineNo">185</span><a name="line.185"></a>
+<span class="sourceLineNo">186</span>  private void deleteFamily(String family, String row, long version)<a name="line.186"></a>
+<span class="sourceLineNo">187</span>      throws IOException {<a name="line.187"></a>
+<span class="sourceLineNo">188</span>    Delete del = new Delete(Bytes.toBytes(row));<a name="line.188"></a>
+<span class="sourceLineNo">189</span>    del.addFamily(Bytes.toBytes(family + "_ROWCOL"), version);<a name="line.189"></a>
+<span class="sourceLineNo">190</span>    del.addFamily(Bytes.toBytes(family + "_ROW"), version);<a name="line.190"></a>
+<span class="sourceLineNo">191</span>    del.addFamily(Bytes.toBytes(family + "_NONE"), version);<a name="line.191"></a>
+<span class="sourceLineNo">192</span>    region.delete(del);<a name="line.192"></a>
+<span class="sourceLineNo">193</span>  }<a name="line.193"></a>
+<span class="sourceLineNo">194</span><a name="line.194"></a>
+<span class="sourceLineNo">195</span>  private static void verifyData(Cell kv, String expectedRow,<a name="line.195"></a>
+<span class="sourceLineNo">196</span>      String expectedCol, long expectedVersion) {<a name="line.196"></a>
+<span class="sourceLineNo">197</span>    assertTrue("RowCheck", CellUtil.matchingRows(kv,  Bytes.toBytes(expectedRow)));<a name="line.197"></a>
+<span class="sourceLineNo">198</span>    assertTrue("ColumnCheck", CellUtil.matchingQualifier(kv, Bytes.toBytes(expectedCol)));<a name="line.198"></a>
+<span class="sourceLineNo">199</span>    assertEquals("TSCheck", expectedVersion, kv.getTimestamp());<a name="line.199"></a>
+<span class="sourceLineNo">200</span>    assertTrue("ValueCheck", CellUtil.matchingValue(kv, genValue(expectedRow, expectedCol, expectedVersion)));<a name="line.200"></a>
+<span class="sourceLineNo">201</span>  }<a name="line.201"></a>
+<span class="sourceLineNo">202</span><a name="line.202"></a>
+<span class="sourceLineNo">203</span>  private static long getBlkAccessCount(byte[] cf) {<a name="line.203"></a>
+<span class="sourceLineNo">204</span>      return HFile.DATABLOCK_READ_COUNT.sum();<a name="line.204"></a>
+<span class="sourceLineNo">205</span>  }<a name="line.205"></a>
+<span class="sourceLineNo">206</span><a name="line.206"></a>
+<span class="sourceLineNo">207</span>  private static long getBlkCount() {<a name="line.207"></a>
+<span class="sourceLineNo">208</span>    return blockCache.getBlockCount();<a name="line.208"></a>
+<span class="sourceLineNo">209</span>  }<a name="line.209"></a>
+<span class="sourceLineNo">210</span><a name="line.210"></a>
+<span class="sourceLineNo">211</span>  /**<a name="line.211"></a>
+<span class="sourceLineNo">212</span>   * Test # of blocks read for some simple seek cases.<a name="line.212"></a>
+<span class="sourceLineNo">213</span>   *<a name="line.213"></a>
+<span class="sourceLineNo">214</span>   * @throws Exception<a name="line.214"></a>
+<span class="sourceLineNo">215</span>   */<a name="line.215"></a>
+<span class="sourceLineNo">216</span>  @Test<a name="line.216"></a>
+<span class="sourceLineNo">217</span>  public void testBlocksRead() throws Exception {<a name="line.217"></a>
+<span class="sourceLineNo">218</span>    byte[] TABLE = Bytes.toBytes("testBlocksRead");<a name="line.218"></a>
+<span class="sourceLineNo">219</span>    String FAMILY = "cf1";<a name="line.219"></a>
+<span class="sourceLineNo">220</span>    Cell kvs[];<a name="line.220"></a>
+<span class="sourceLineNo">221</span>    this.region = initHRegion(TABLE, testName.getMethodName(), conf, FAMILY);<a name="line.221"></a>
+<span class="sourceLineNo">222</span><a name="line.222"></a>
+<span class="sourceLineNo">223</span>    try {<a name="line.223"></a>
+<span class="sourceLineNo">224</span>      putData(FAMILY, "row", "col1", 1);<a name="line.224"></a>
+<span class="sourceLineNo">225</span>      putData(FAMILY, "row", "col2", 2);<a name="line.225"></a>
+<span class="sourceLineNo">226</span>      putData(FAMILY, "row", "col3", 3);<a name="line.226"></a>
+<span class="sourceLineNo">227</span>      putData(FAMILY, "row", "col4", 4);<a name="line.227"></a>
+<span class="sourceLineNo">228</span>      putData(FAMILY, "row", "col5", 5);<a name="line.228"></a>
+<span class="sourceLineNo">229</span>      putData(FAMILY, "row", "col6", 6);<a name="line.229"></a>
+<span class="sourceLineNo">230</span>      putData(FAMILY, "row", "col7", 7);<a name="line.230"></a>
+<span class="sourceLineNo">231</span>      region.flush(true);<a name="line.231"></a>
+<span class="sourceLineNo">232</span><a name="line.232"></a>
+<span class="sourceLineNo">233</span>      // Expected block reads: 1<a name="line.233"></a>
+<span class="sourceLineNo">234</span>      // The top block has the KV we are<a name="line.234"></a>
+<span class="sourceLineNo">235</span>      // interested. So only 1 seek is needed.<a name="line.235"></a>
+<span class="sourceLineNo">236</span>      kvs = getData(FAMILY, "row", "col1", 1);<a name="line.236"></a>
+<span class="sourceLineNo">237</span>      assertEquals(1, kvs.length);<a name="line.237"></a>
+<span class="sourceLineNo">238</span>      verifyData(kvs[0], "row", "col1", 1);<a name="line.238"></a>
+<span class="sourceLineNo">239</span><a name="line.239"></a>
+<span class="sourceLineNo">240</span>      // Expected block reads: 2<a name="line.240"></a>
+<span class="sourceLineNo">241</span>      // The top block and next block has the KVs we are<a name="line.241"></a>
+<span class="sourceLineNo">242</span>      // interested. So only 2 seek is needed.<a name="line.242"></a>
+<span class="sourceLineNo">243</span>      kvs = getData(FAMILY, "row", Arrays.asList("col1", "col2"), 2);<a name="line.243"></a>
+<span class="sourceLineNo">244</span>      assertEquals(2, kvs.length);<a name="line.244"></a>
+<span class="sourceLineNo">245</span>      verifyData(kvs[0], "row", "col1", 1);<a name="line.245"></a>
+<span class="sourceLineNo">246</span>      verifyData(kvs[1], "row", "col2", 2);<a name="line.246"></a>
+<span class="sourceLineNo">247</span><a name="line.247"></a>
+<span class="sourceLineNo">248</span>      // Expected block reads: 3<a name="line.248"></a>
+<span class="sourceLineNo">249</span>      // The first 2 seeks is to find out col2. [HBASE-4443]<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      // One additional seek for col3<a name="line.250"></a>
+<span class="sourceLineNo">251</span>      // So 3 seeks are needed.<a name="line.251"></a>
+<span class="sourceLineNo">252</span>      kvs = getData(FAMILY, "row", Arrays.asList("col2", "col3"), 2);<a name="line.252"></a>
+<span class="sourceLineNo">253</span>      assertEquals(2, kvs.length);<a name="line.253"></a>
+<span class="sourceLineNo">254</span>      verifyData(kvs[0], "row", "col2", 2);<a name="line.254"></a>
+<span class="sourceLineNo">255</span>      verifyData(kvs[1], "row", "col3", 3);<a name="line.255"></a>
+<span class="sourceLineNo">256</span><a name="line.256"></a>
+<span class="sourceLineNo">257</span>      // Expected block reads: 1. [HBASE-4443]&amp;[HBASE-7845]<a name="line.257"></a>
+<span class="sourceLineNo">258</span>      kvs = getData(FAMILY, "row", Arrays.asList("col5"), 1);<a name="line.258"></a>
+<span class="sourceLineNo">259</span>      assertEquals(1, kvs.length);<a name="line.259"></a>
+<span class="sourceLineNo">260</span>      verifyData(kvs[0], "row", "col5", 5);<a name="line.260"></a>
+<span class="sourceLineNo">261</span>    } finally {<a name="line.261"></a>
+<span class="sourceLineNo">262</span>      HBaseTestingUtility.closeRegionAndWAL(this.region);<a name="line.262"></a>
+<span class="sourceLineNo">263</span>      this.region = null;<a name="line.263"></a>
+<span class="sourceLineNo">264</span>    }<a name="line.264"></a>
+<span class="sourceLineNo">265</span>  }<a name="line.265"></a>
+<span class="sourceLineNo">266</span><a name="line.266"></a>
+<span class="sourceLineNo">267</span>  /**<a name="line.267"></a>
+<span class="sourceLineNo">268</span>   * Test # of blocks read (targeted at some of the cases Lazy Seek optimizes).<a name="line.268"></a>
+<span class="sourceLineNo">269</span>   *<a name="line.269"></a>
+<span class="sourceLineNo">270</span>   * @throws Exception<a name="line.270"></a>
+<span class="sourceLineNo">271</span>   */<a name="line.271"></a>
+<span class="sourceLineNo">272</span>  @Test<a name="line.272"></a>
+<span class="sourceLineNo">273</span>  public void testLazySeekBlocksRead() throws Exception {<a name="line.273"></a>
+<span class="sourceLineNo">274</span>    byte[] TABLE = Bytes.toBytes("testLazySeekBlocksRead");<a name="line.274"></a>
+<span class="sourceLineNo">275</span>    String FAMILY = "cf1";<a name="line.275"></a>
+<span class="sourceLineNo">276</span>    Cell kvs[];<a name="line.276"></a>
+<span class="sourceLineNo">277</span>    this.region = initHRegion(TABLE, testName.getMethodName(), conf, FAMILY);<a name="line.277"></a>
+<span class="sourceLineNo">278</span><a name="line.278"></a>
+<span class="sourceLineNo">279</span>    try {<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      // File 1<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      putData(FAMILY, "row", "col1", 1);<a name="line.281"></a>
+<span class="sourceLineNo">282</span>      putData(FAMILY, "row", "col2", 2);<a name="line.282"></a>
+<span class="sourceLineNo">283</span>      region.flush(true);<a name="line.283"></a>
+<span class="sourceLineNo">284</span><a name="line.284"></a>
+<span class="sourceLineNo">285</span>      // File 2<a name="line.285"></a>
+<span class="sourceLineNo">286</span>      putData(FAMILY, "row", "col1", 3);<a name="line.286"></a>
+<span class="sourceLineNo">287</span>      putData(FAMILY, "row", "col2", 4);<a name="line.287"></a>
+<span class="sourceLineNo">288</span>      region.flush(true);<a name="line.288"></a>
+<span class="sourceLineNo">289</span><a name="line.289"></a>
+<span class="sourceLineNo">290</span>      // Expected blocks read: 1.<a name="line.290"></a>
+<span class="sourceLineNo">291</span>      // File 2's top block is also the KV we are<a name="line.291"></a>
+<span class="sourceLineNo">292</span>      // interested. So only 1 seek is needed.<a name="line.292"></a>
+<span class="sourceLineNo">293</span>      kvs = getData(FAMILY, "row", Arrays.asList("col1"), 1);<a name="line.293"></a>
+<span class="sourceLineNo">294</span>      assertEquals(1, kvs.length);<a name="line.294"></a>
+<span class="sourceLineNo">295</span>      verifyData(kvs[0], "row", "col1", 3);<a name="line.295"></a>
+<span class="sourceLineNo">296</span><a name="line.296"></a>
+<span class="sourceLineNo">297</span>      // Expected blocks read: 2<a name="line.297"></a>
+<span class="sourceLineNo">298</span>      // File 2's top block has the "col1" KV we are<a name="line.298"></a>
+<span class="sourceLineNo">299</span>      // interested. We also need "col2" which is in a block<a name="line.299"></a>
+<span class="sourceLineNo">300</span>      // of its own. So, we need that block as well.<a name="line.300"></a>
+<span class="sourceLineNo">301</span>      kvs = getData(FAMILY, "row", Arrays.asList("col1", "col2"), 2);<a name="line.301"></a>
+<span class="sourceLineNo">302</span>      assertEquals(2, kvs.length);<a name="line.302"></a>
+<span class="sourceLineNo">303</span>      verifyData(kvs[0], "row", "col1", 3);<a name="line.303"></a>
+<span class="sourceLineNo">304</span>      verifyData(kvs[1], "row", "col2", 4);<a name="line.304"></a>
+<span class="sourceLineNo">305</span><a name="line.305"></a>
+<span class="sourceLineNo">306</span>      // File 3: Add another column<a name="line.306"></a>
+<span class="sourceLineNo">307</span>      putData(FAMILY, "row", "col3", 5);<a name="line.307"></a>
+<span class="sourceLineNo">308</span>      region.flush(true);<a name="line.308"></a>
+<span class="sourceLineNo">309</span><a name="line.309"></a>
+<span class="sourceLineNo">310</span>      // Expected blocks read: 1<a name="line.310"></a>
+<span class="sourceLineNo">311</span>      // File 3's top block has the "col3" KV we are<a name="line.311"></a>
+<span class="sourceLineNo">312</span>      // interested. So only 1 seek is needed.<a name="line.312"></a>
+<span class="sourceLineNo">313</span>      kvs = getData(FAMILY, "row", "col3", 1);<a name="line.313"></a>
+<span class="sourceLineNo">314</span>      assertEquals(1, kvs.length);<a name="line.314"></a>
+<span class="sourceLineNo">315</span>      verifyData(kvs[0], "row", "col3", 5);<a name="line.315"></a>
+<span class="sourceLineNo">316</span><a name="line.316"></a>
+<span class="sourceLineNo">317</span>      // Get a column from older file.<a name="line.317"></a>
+<span class="sourceLineNo">318</span>      // For ROWCOL Bloom filter: Expected blocks read: 1.<a name="line.318"></a>
+<span class="sourceLineNo">319</span>      // For ROW Bloom filter: Expected blocks read: 2.<a name="line.319"></a>
+<span class="sourceLineNo">320</span>      // For NONE Bloom filter: Expected blocks read: 2.<a name="line.320"></a>
+<span class="sourceLineNo">321</span>      kvs = getData(FAMILY, "row", Arrays.asList("col1"), 1, 2, 2);<a name="line.321"></a>
+<span class="sourceLineNo">322</span>      assertEquals(1, kvs.length);<a name="line.322"></a>
+<span class="sourceLineNo">323</span>      verifyData(kvs[0], "row", "col1", 3);<a name="line.323"></a>
+<span class="sourceLineNo">324</span><a name="line.324"></a>
+<span class="sourceLineNo">325</span>      // File 4: Delete the entire row.<a name="line.325"></a>
+<span class="sourceLineNo">326</span>      deleteFamily(FAMILY, "row", 6);<a name="line.326"></a>
+<span class="sourceLineNo">327</span>      region.flush(true);<a name="line.327"></a>
+<span class="sourceLineNo">328</span><a name="line.328"></a>
+<span class="sourceLineNo">329</span>      // For ROWCOL Bloom filter: Expected blocks read: 2.<a name="line.329"></a>
+<span class="sourceLineNo">330</span>      // For ROW Bloom filter: Expected blocks read: 3.<a name="line.330"></a>
+<span class="sourceLineNo">331</span>      // For NONE Bloom filter: Expected blocks read: 3.<a name="line.331"></a>
+<span class="sourceLineNo">332</span>      kvs = getData(FAMILY, "row", "col1", 2, 3, 3);<a name="line.332"></a>
+<span class="sourceLineNo">333</span>      assertEquals(0, kvs.length);<a name="line.333"></a>
+<span class="sourceLineNo">334</span>      kvs = getData(FAMILY, "row", "col2", 2, 3, 3);<a name="line.334"></a>
+<span class="sourceLineNo">335</span>      assertEquals(0, kvs.length);<a name="line.335"></a>
+<span class="sourceLineNo">336</span>      kvs = getData(FAMILY, "row", "col3", 2);<a name="line.336"></a>
+<span class="sourceLineNo">337</span>      assertEquals(0, kvs.length);<a name="line.337"></a>
+<span class="sourceLineNo">338</span>      kvs = getData(FAMILY, "row", Arrays.asList("col1", "col2", "col3"), 4);<a name="line.338"></a>
+<span class="sourceLineNo">339</span>      assertEquals(0, kvs.length);<a name="line.339"></a>
+<span class="sourceLineNo">340</span><a name="line.340"></a>
+<span class="sourceLineNo">341</span>      // File 5: Delete<a name="line.341"></a>
+<span class="sourceLineNo">342</span>      deleteFamily(FAMILY, "row", 10);<a name="line.342"></a>
+<span class="sourceLineNo">343</span>      region.flush(true);<a name="line.343"></a>
+<span class="sourceLineNo">344</span><a name="line.344"></a>
+<span class="sourceLineNo">345</span>      // File 6: some more puts, but with timestamps older than the<a name="line.345"></a>
+<span class="sourceLineNo">346</span>      // previous delete.<a name="line.346"></a>
+<span class="sourceLineNo">347</span>      putData(FAMILY, "row", "col1", 7);<a name="line.347"></a>
+<span class="sourceLineNo">348</span>      putData(FAMILY, "row", "col2", 8);<a name="line.348"></a>
+<span class="sourceLineNo">349</span>      putData(FAMILY, "row", "col3", 9);<a name="line.349"></a>
+<span class="sourceLineNo">350</span>      region.flush(true);<a name="line.350"></a>
+<span class="sourceLineNo">351</span><a name="line.351"></a>
+<span class="sourceLineNo">352</span>      // Baseline expected blocks read: 6. [HBASE-4532]<a name="line.352"></a>
+<span class="sourceLineNo">353</span>      kvs = getData(FAMILY, "row", Arrays.asList("col1", "col2", "col3"), 6, 7, 7);<a name="line.353"></a>
+<span class="sourceLineNo">354</span>      assertEquals(0, kvs.length);<a name="line.354"></a>
+<span class="sourceLineNo">355</span><a name="line.355"></a>
+<span class="sourceLineNo">356</span>      // File 7: Put back new data<a name="line.356"></a>
+<span class="sourceLineNo">357</span>      putData(FAMILY, "row", "col1", 11);<a name="line.357"></a>
+<span class="sourceLineNo">358</span>      putData(FAMILY, "row", "col2", 12);<a name="line.358"></a>
+<span class="sourceLineNo">359</span>      putData(FAMILY, "row", "col3", 13);<a name="line.359"></a>
+<span class="sourceLineNo">360</span>      region.flush(true);<a name="line.360"></a>
 <span class="sourceLineNo">361</span><a name="line.361"></a>
-<span class="sourceLineNo">362</span>      // Expected blocks read: 8. [HBASE-4585, HBASE-13109]<a name="line.362"></a>
-<span class="sourceLineNo">363</span>      kvs = getData(FAMILY, "row", Arrays.asList("col1", "col2", "col3"), 8, 9, 9);<a name="line.363"></a>
-<span class="sourceLineNo">364</span>      assertEquals(3, kvs.length);<a name="line.364"></a>
-<span class="sourceLineNo">365</span>      verifyData(kvs[0], "row", "col1", 11);<a name="line.365"></a>
-<span class="sourceLineNo">366</span>      verifyData(kvs[1], "row", "col2", 12);<a name="line.366"></a>
-<span class="sourceLineNo">367</span>      verifyData(kvs[2], "row", "col3", 13);<a name="line.367"></a>
-<span class="sourceLineNo">368</span>    } finally {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>      HBaseTestingUtility.closeRegionAndWAL(this.region);<a name="line.369"></a>
-<span class="sourceLineNo">370</span>      this.region = null;<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    }<a name="line.371"></a>
-<span class="sourceLineNo">372</span>  }<a name="line.372"></a>
-<span class="sourceLineNo">373</span><a name="line.373"></a>
-<span class="sourceLineNo">374</span>  /**<a name="line.374"></a>
-<span class="sourceLineNo">375</span>   * Test # of blocks read to ensure disabling cache-fill on Scan works.<a name="line.375"></a>
-<span class="sourceLineNo">376</span>   * @throws Exception<a name="line.376"></a>
-<span class="sourceLineNo">377</span>   */<a name="line.377"></a>
-<span class="sourceLineNo">378</span>  @Test<a name="line.378"></a>
-<span class="sourceLineNo">379</span>  public void testBlocksStoredWhenCachingDisabled() throws Exception {<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    byte [] TABLE = Bytes.toBytes("testBlocksReadWhenCachingDisabled");<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    String FAMILY = "cf1";<a name="line.381"></a>
-<span class="sourceLineNo">382</span><a name="line.382"></a>
-<span class="sourceLineNo">383</span>    this.region = initHRegion(TABLE, testName.getMethodName(), conf, FAMILY);<a name="line.383"></a>
-<span class="sourceLineNo">384</span><a name="line.384"></a>
-<span class="sourceLineNo">385</span>    try {<a name="line.385"></a>
-<span class="sourceLineNo">386</span>      putData(FAMILY, "row", "col1", 1);<a name="line.386"></a>
-<span class="sourceLineNo">387</span>      putData(FAMILY, "row", "col2", 2);<a name="line.387"></a>
-<span class="sourceLineNo">388</span>      region.flush(true);<a name="line.388"></a>
-<span class="sourceLineNo">389</span><a name="line.389"></a>
-<span class="sourceLineNo">390</span>      // Execute a scan with caching turned off<a name="line.390"></a>
-<span class="sourceLineNo">391</span>      // Expected blocks stored: 0<a name="line.391"></a>
-<span class="sourceLineNo">392</span>      long blocksStart = getBlkCount();<a name="line.392"></a>
-<span class="sourceLineNo">393</span>      Scan scan = new Scan();<a name="line.393"></a>
-<span class="sourceLineNo">394</span>      scan.setCacheBlocks(false);<a name="line.394"></a>
-<span class="sourceLineNo">395</span>      RegionScanner rs = region.getScanner(scan);<a name="line.395"></a>
-<span class="sourceLineNo">396</span>      List&lt;Cell&gt; result = new ArrayList&lt;&gt;(2);<a name="line.396"></a>
-<span class="sourceLineNo">397</span>      rs.next(result);<a name="line.397"></a>
-<span class="sourceLineNo">398</span>      assertEquals(2 * BLOOM_TYPE.length, result.size());<a name="line.398"></a>
-<span class="sourceLineNo">399</span>      rs.close();<a name="line.399"></a>
-<span class="sourceLineNo">400</span>      long blocksEnd = getBlkCount();<a name="line.400"></a>
-<span class="sourceLineNo">401</span><a name="line.401"></a>
-<span class="sourceLineNo">402</span>      assertEquals(blocksStart, blocksEnd);<a name="line.402"></a>
-<span class="sourceLineNo">403</span><a name="line.403"></a>
-<span class="sourceLineNo">404</span>      // Execute with caching turned on<a name="line.404"></a>
-<span class="sourceLineNo">405</span>      // Expected blocks stored: 2<a name="line.405"></a>
-<span class="sourceLineNo">406</span>      blocksStart = blocksEnd;<a name="line.406"></a>
-<span class="sourceLineNo">407</span>      scan.setCacheBlocks(true);<a name="line.407"></a>
-<span class="sourceLineNo">408</span>      rs = region.getScanner(scan);<a name="line.408"></a>
-<span class="sourceLineNo">409</span>      result = new ArrayList&lt;&gt;(2);<a name="line.409"></a>
-<span class="sourceLineNo">410</span>      rs.next(result);<a name="line.410"></a>
-<span class="sourceLineNo">411</span>      assertEquals(2 * BLOOM_TYPE.length, result.size());<a name="line.411"></a>
-<span class="sourceLineNo">412</span>      rs.close();<a name="line.412"></a>
-<span class="sourceLineNo">413</span>      blocksEnd = getBlkCount();<a name="line.413"></a>
-<span class="sourceLineNo">414</span><a name="line.414"></a>
-<span class="sourceLineNo">415</span>      assertEquals(2 * BLOOM_TYPE.length, blocksEnd - blocksStart);<a name="line.415"></a>
-<span class="sourceLineNo">416</span>    } finally {<a name="line.416"></a>
-<span class="sourceLineNo">417</span>      HBaseTestingUtility.closeRegionAndWAL(this.region);<a name="line.417"></a>
-<span class="sourceLineNo">418</span>      this.region = null;<a name="line.418"></a>
-<span class="sourceLineNo">419</span>    }<a name="line.419"></a>
-<span class="sourceLineNo">420</span>  }<a name="line.420"></a>
-<span class="sourceLineNo">421</span><a name="line.421"></a>
-<span class="sourceLineNo">422</span>  @Test<a name="line.422"></a>
-<span class="sourceLineNo">423</span>  public void testLazySeekBlocksReadWithDelete() throws Exception {<a name="line.423"></a>
-<span class="sourceLineNo">424</span>    byte[] TABLE = Bytes.toBytes("testLazySeekBlocksReadWithDelete");<a name="line.424"></a>
-<span class="sourceLineNo">425</span>    String FAMILY = "cf1";<a name="line.425"></a>
-<span class="sourceLineNo">426</span>    Cell kvs[];<a name="line.426"></a>
-<span class="sourceLineNo">427</span>    this.region = initHRegion(TABLE, testName.getMethodName(), conf, FAMILY);<a name="line.427"></a>
-<span class="sourceLineNo">428</span>    try {<a name="line.428"></a>
-<span class="sourceLineNo">429</span>      deleteFamily(FAMILY, "row", 200);<a name="line.429"></a>
-<span class="sourceLineNo">430</span>      for (int i = 0; i &lt; 100; i++) {<a name="line.430"></a>
-<span class="sourceLineNo">431</span>        putData(FAMILY, "row", "col" + i, i);<a name="line.431"></a>
-<span class="sourceLineNo">432</span>      }<a name="line.432"></a>
-<span class="sourceLineNo">433</span>      putData(FAMILY, "row", "col99", 201);<a name="line.433"></a>
-<span class="sourceLineNo">434</span>      region.flush(true);<a name="line.434"></a>
-<span class="sourceLineNo">435</span><a name="line.435"></a>
-<span class="sourceLineNo">436</span>      kvs = getData(FAMILY, "row", Arrays.asList("col0"), 2);<a name="line.436"></a>
-<span class="sourceLineNo">437</span>      assertEquals(0, kvs.length);<a name="line.437"></a>
-<span class="sourceLineNo">438</span><a name="line.438"></a>
-<span class="sourceLineNo">439</span>      kvs = getData(FAMILY, "row", Arrays.asList("col99"), 2);<a name="line.439"></a>
-<span class="sourceLineNo">440</span>      assertEquals(1, kvs.length);<a name="line.440"></a>
-<span class="sourceLineNo">441</span>      verifyData(kvs[0], "row", "col99", 201);<a name="line.441"></a>
-<span class="sourceLineNo">442</span>    } finally {<a name="line.442"></a>
-<span class="sourceLineNo">443</span>      HBaseTestingUtility.closeRegionAndWAL(this.region);<a name="line.443"></a>
-<span class="sourceLineNo">444</span>      this.region = null;<a name="line.444"></a>
-<span class="sourceLineNo">445</span>    }<a name="line.445"></a>
-<span class="sourceLineNo">446</span>  }<a name="line.446"></a>
-<span class="sourceLineNo">447</span><a name="line.447"></a>
-<span class="sourceLineNo">448</span>}<a name="line.448"></a>
+<span class="sourceLineNo">362</span><a name="line.362"></a>
+<span class="sourceLineNo">363</span>      // Expected blocks read: 8. [HBASE-4585, HBASE-13109]<a name="line.363"></a>
+<span class="sourceLineNo">364</span>      kvs = getData(FAMILY, "row", Arrays.asList("col1", "col2", "col3"), 8, 9, 9);<a name="line.364"></a>
+<span class="sourceLineNo">365</span>      assertEquals(3, kvs.length);<a name="line.365"></a>
+<span class="sourceLineNo">366</span>      verifyData(kvs[0], "row", "col1", 11);<a name="line.366"></a>
+<span class="sourceLineNo">367</span>      verifyData(kvs[1], "row", "col2", 12);<a name="line.367"></a>
+<span class="sourceLineNo">368</span>      verifyData(kvs[2], "row", "col3", 13);<a name="line.368"></a>
+<span class="sourceLineNo">369</span>    } finally {<a name="line.369"></a>
+<span class="sourceLineNo">370</span>      HBaseTestingUtility.closeRegionAndWAL(this.region);<a name="line.370"></a>
+<span class="sourceLineNo">371</span>      this.region = null;<a name="line.371"></a>
+<span class="sourceLineNo">372</span>    }<a name="line.372"></a>
+<span class="sourceLineNo">373</span>  }<a name="line.373"></a>
+<span class="sourceLineNo">374</span><a name="line.374"></a>
+<span class="sourceLineNo">375</span>  /**<a name="line.375"></a>
+<span class="sourceLineNo">376</span>   * Test # of blocks read to ensure disabling cache-fill on Scan works.<a name="line.376"></a>
+<span class="sourceLineNo">377</span>   * @throws Exception<a name="line.377"></a>
+<span class="sourceLineNo">378</span>   */<a name="line.378"></a>
+<span class="sourceLineNo">379</span>  @Test<a name="line.379"></a>
+<span class="sourceLineNo">380</span>  public void testBlocksStoredWhenCachingDisabled() throws Exception {<a name="line.380"></a>
+<span class="sourceLineNo">381</span>    byte [] TABLE = Bytes.toBytes("testBlocksReadWhenCachingDisabled");<a name="line.381"></a>
+<span class="sourceLineNo">382</span>    String FAMILY = "cf1";<a name="line.382"></a>
+<span class="sourceLineNo">383</span><a name="line.383"></a>
+<span class="sourceLineNo">384</span>    this.region = initHRegion(TABLE, testName.getMethodName(), conf, FAMILY);<a name="line.384"></a>
+<span class="sourceLineNo">385</span><a name="line.385"></a>
+<span class="sourceLineNo">386</span>    try {<a name="line.386"></a>
+<span class="sourceLineNo">387</span>      putData(FAMILY, "row", "col1", 1);<a name="line.387"></a>
+<span class="sourceLineNo">388</span>      putData(FAMILY, "row", "col2", 2);<a name="line.388"></a>
+<span class="sourceLineNo">389</span>      region.flush(true);<a name="line.389"></a>
+<span class="sourceLineNo">390</span><a name="line.390"></a>
+<span class="sourceLineNo">391</span>      // Execute a scan with caching turned off<a name="line.391"></a>
+<span class="sourceLineNo">392</span>      // Expected blocks stored: 0<a name="line.392"></a>
+<span class="sourceLineNo">393</span>      long blocksStart = getBlkCount();<a name="line.393"></a>
+<span class="sourceLineNo">394</span>      Scan scan = new Scan();<a name="line.394"></a>
+<span class="sourceLineNo">395</span>      scan.setCacheBlocks(false);<a name="line.395"></a>
+<span class="sourceLineNo">396</span>      RegionScanner rs = region.getScanner(scan);<a name="line.396"></a>
+<span class="sourceLineNo">397</span>      List&lt;Cell&gt; result = new ArrayList&lt;&gt;(2);<a name="line.397"></a>
+<span class="sourceLineNo">398</span>      rs.next(result);<a name="line.398"></a>
+<span class="sourceLineNo">399</span>      assertEquals(2 * BLOOM_TYPE.length, result.size());<a name="line.399"></a>
+<span class="sourceLineNo">400</span>      rs.close();<a name="line.400"></a>
+<span class="sourceLineNo">401</span>      long blocksEnd = getBlkCount();<a name="line.401"></a>
+<span class="sourceLineNo">402</span><a name="line.402"></a>
+<span class="sourceLineNo">403</span>      assertEquals(blocksStart, blocksEnd);<a name="line.403"></a>
+<span class="sourceLineNo">404</span><a name="line.404"></a>
+<span class="sourceLineNo">405</span>      // Execute with caching turned on<a name="line.405"></a>
+<span class="sourceLineNo">406</span>      // Expected blocks stored: 2<a name="line.406"></a>
+<span class="sourceLineNo">407</span>      blocksStart = blocksEnd;<a name="line.407"></a>
+<span class="sourceLineNo">408</span>      scan.setCacheBlocks(true);<a name="line.408"></a>
+<span class="sourceLineNo">409</span>      rs = region.getScanner(scan);<a name="line.409"></a>
+<span class="sourceLineNo">410</span>      result = new ArrayList&lt;&gt;(2);<a name="line.410"></a>
+<span class="sourceLineNo">411</span>      rs.next(result);<a name="line.411"></a>
+<span class="sourceLineNo">412</span>      assertEquals(2 * BLOOM_TYPE.length, result.size());<a name="line.412"></a>
+<span class="sourceLineNo">413</span>      rs.close();<a name="line.413"></a>
+<span class="sourceLineNo">414</span>      blocksEnd = getBlkCount();<a name="line.414"></a>
+<span class="sourceLineNo">415</span><a name="line.415"></a>
+<span class="sourceLineNo">416</span>      assertEquals(2 * BLOOM_TYPE.length, blocksEnd - blocksStart);<a name="line.416"></a>
+<span class="sourceLineNo">417</span>    } finally {<a name="line.417"></a>
+<span class="sourceLineNo">418</span>      HBaseTestingUtility.closeRegionAndWAL(this.region);<a name="line.418"></a>
+<span class="sourceLineNo">419</span>      this.region = null;<a name="line.419"></a>
+<span class="sourceLineNo">420</span>    }<a name="line.420"></a>
+<span class="sourceLineNo">421</span>  }<a name="line.421"></a>
+<span class="sourceLineNo">422</span><a name="line.422"></a>
+<span class="sourceLineNo">423</span>  @Test<a name="line.423"></a>
+<span class="sourceLineNo">424</span>  public void testLazySeekBlocksReadWithDelete() throws Exception {<a name="line.424"></a>
+<span class="sourceLineNo">425</span>    byte[] TABLE = Bytes.toBytes("testLazySeekBlocksReadWithDelete");<a name="line.425"></a>
+<span class="sourceLineNo">426</span>    String FAMILY = "cf1";<a name="line.426"></a>
+<span class="sourceLineNo">427</span>    Cell kvs[];<a name="line.427"></a>
+<span class="sourceLineNo">428</span>    this.region = initHRegion(TABLE, testName.getMethodName(), conf, FAMILY);<a name="line.428"></a>
+<span class="sourceLineNo">429</span>    try {<a name="line.429"></a>
+<span class="sourceLineNo">430</span>      deleteFamily(FAMILY, "row", 200);<a name="line.430"></a>
+<span class="sourceLineNo">431</span>      for (int i = 0; i &lt; 100; i++) {<a name="line.431"></a>
+<span class="sourceLineNo">432</span>        putData(FAMILY, "row", "col" + i, i);<a name="line.432"></a>
+<span class="sourceLineNo">433</span>      }<a name="line.433"></a>
+<span class="sourceLineNo">434</span>      putData(FAMILY, "row", "col99", 201);<a name="line.434"></a>
+<span class="sourceLineNo">435</span>      region.flush(true);<a name="line.435"></a>
+<span class="sourceLineNo">436</span><a name="line.436"></a>
+<span class="sourceLineNo">437</span>      kvs = getData(FAMILY, "row", Arrays.asList("col0"), 2);<a name="line.437"></a>
+<span class="sourceLineNo">438</span>      assertEquals(0, kvs.length);<a name="line.438"></a>
+<span class="sourceLineNo">439</span><a name="line.439"></a>
+<span class="sourceLineNo">440</span>      kvs = getData(FAMILY, "row", Arrays.asList("col99"), 2);<a name="line.440"></a>
+<span class="sourceLineNo">441</span>      assertEquals(1, kvs.length);<a name="line.441"></a>
+<span class="sourceLineNo">442</span>      verifyData(kvs[0], "row", "col99", 201);<a name="line.442"></a>
+<span class="sourceLineNo">443</span>    } finally {<a name="line.443"></a>
+<span class="sourceLineNo">444</span>      HBaseTestingUtility.closeRegionAndWAL(this.region);<a name="line.444"></a>
+<span class="sourceLineNo">445</span>      this.region = null;<a name="line.445"></a>
+<span class="sourceLineNo">446</span>    }<a name="line.446"></a>
+<span class="sourceLineNo">447</span>  }<a name="line.447"></a>
+<span class="sourceLineNo">448</span><a name="line.448"></a>
+<span class="sourceLineNo">449</span>}<a name="line.449"></a>
 
 
 

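The added tests above follow a block-cache accounting pattern: sample the cache's cached-block count, run a scan with Scan.setCacheBlocks() toggled off and then on, and compare the counts afterwards. The following is a minimal sketch of that pattern only, not the test itself; it assumes an already-initialized HRegion with flushed data, and getBlockCacheCount() is a hypothetical stand-in for the test's getBlkCount() helper.

    import static org.junit.Assert.assertEquals;
    import static org.junit.Assert.assertTrue;

    import java.util.ArrayList;
    import java.util.List;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.regionserver.HRegion;
    import org.apache.hadoop.hbase.regionserver.InternalScanner;

    // Sketch only: `region` holds flushed data; getBlockCacheCount() is a
    // hypothetical helper standing in for the test's getBlkCount().
    void verifyBlockCacheAccounting(HRegion region) throws Exception {
      // Pass 1: block caching disabled, so the cached-block count should not change.
      Scan scan = new Scan();
      scan.setCacheBlocks(false);
      long before = getBlockCacheCount();
      InternalScanner rs = region.getScanner(scan);
      List<Cell> results = new ArrayList<>();
      rs.next(results);
      rs.close();
      assertEquals(before, getBlockCacheCount());

      // Pass 2: block caching enabled, so blocks read by the scan should be cached.
      scan.setCacheBlocks(true);
      before = getBlockCacheCount();
      rs = region.getScanner(scan);
      results.clear();
      rs.next(results);
      rs.close();
      assertTrue(getBlockCacheCount() > before);
    }
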
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestBlocksScanned.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestBlocksScanned.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestBlocksScanned.html
index 5596067..45fb978 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestBlocksScanned.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestBlocksScanned.html
@@ -69,8 +69,8 @@
 <span class="sourceLineNo">061</span>  @Before<a name="line.61"></a>
 <span class="sourceLineNo">062</span>  public void setUp() throws Exception {<a name="line.62"></a>
 <span class="sourceLineNo">063</span>    super.setUp();<a name="line.63"></a>
-<span class="sourceLineNo">064</span><a name="line.64"></a>
-<span class="sourceLineNo">065</span>    TEST_UTIL = new HBaseTestingUtility();<a name="line.65"></a>
+<span class="sourceLineNo">064</span>    TEST_UTIL = new HBaseTestingUtility();<a name="line.64"></a>
+<span class="sourceLineNo">065</span>    CacheConfig.instantiateBlockCache(TEST_UTIL.getConfiguration());<a name="line.65"></a>
 <span class="sourceLineNo">066</span>  }<a name="line.66"></a>
 <span class="sourceLineNo">067</span><a name="line.67"></a>
 <span class="sourceLineNo">068</span>  @Test<a name="line.68"></a>
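In the updated setUp() above, the global block cache is set up by an explicit CacheConfig.instantiateBlockCache(conf) call before it is used; the HRegionServer diff below makes the same call, and skips it when the master carries no tables. A minimal sketch of that initialization order, assuming only the CacheConfig methods visible in these diffs:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.io.hfile.BlockCache;
    import org.apache.hadoop.hbase.io.hfile.CacheConfig;

    // Create the process-wide block cache first; only then can a CacheConfig expose it.
    static BlockCache setUpBlockCache() {
      Configuration conf = HBaseConfiguration.create();
      CacheConfig.instantiateBlockCache(conf);   // global BlockCache is created here
      CacheConfig cacheConf = new CacheConfig(conf);
      return cacheConf.getBlockCache();          // expected to be non-null after instantiation
    }

Skipping instantiateBlockCache() leaves no global cache behind, consistent with the "no need to instantiate global block cache when master not carry table" comment in the HRegionServer diff below.
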


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
index 6369c27..ea05301 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
@@ -603,3251 +603,3256 @@
 <span class="sourceLineNo">595</span>      // init superusers and add the server principal (if using security)<a name="line.595"></a>
 <span class="sourceLineNo">596</span>      // or process owner as default super user.<a name="line.596"></a>
 <span class="sourceLineNo">597</span>      Superusers.initialize(conf);<a name="line.597"></a>
-<span class="sourceLineNo">598</span><a name="line.598"></a>
-<span class="sourceLineNo">599</span>      regionServerAccounting = new RegionServerAccounting(conf);<a name="line.599"></a>
+<span class="sourceLineNo">598</span>      regionServerAccounting = new RegionServerAccounting(conf);<a name="line.598"></a>
+<span class="sourceLineNo">599</span><a name="line.599"></a>
 <span class="sourceLineNo">600</span>      boolean isMasterNotCarryTable =<a name="line.600"></a>
 <span class="sourceLineNo">601</span>          this instanceof HMaster &amp;&amp; !LoadBalancer.isTablesOnMaster(conf);<a name="line.601"></a>
-<span class="sourceLineNo">602</span>      cacheConfig = new CacheConfig(conf, !isMasterNotCarryTable);<a name="line.602"></a>
-<span class="sourceLineNo">603</span>      mobCacheConfig = new MobCacheConfig(conf, !isMasterNotCarryTable);<a name="line.603"></a>
-<span class="sourceLineNo">604</span>      uncaughtExceptionHandler = new UncaughtExceptionHandler() {<a name="line.604"></a>
-<span class="sourceLineNo">605</span>        @Override<a name="line.605"></a>
-<span class="sourceLineNo">606</span>        public void uncaughtException(Thread t, Throwable e) {<a name="line.606"></a>
-<span class="sourceLineNo">607</span>          abort("Uncaught exception in executorService thread " + t.getName(), e);<a name="line.607"></a>
-<span class="sourceLineNo">608</span>        }<a name="line.608"></a>
-<span class="sourceLineNo">609</span>      };<a name="line.609"></a>
-<span class="sourceLineNo">610</span><a name="line.610"></a>
-<span class="sourceLineNo">611</span>      initializeFileSystem();<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      spanReceiverHost = SpanReceiverHost.getInstance(getConfiguration());<a name="line.612"></a>
-<span class="sourceLineNo">613</span><a name="line.613"></a>
-<span class="sourceLineNo">614</span>      this.configurationManager = new ConfigurationManager();<a name="line.614"></a>
-<span class="sourceLineNo">615</span>      setupWindows(getConfiguration(), getConfigurationManager());<a name="line.615"></a>
-<span class="sourceLineNo">616</span><a name="line.616"></a>
-<span class="sourceLineNo">617</span>      // Some unit tests don't need a cluster, so no zookeeper at all<a name="line.617"></a>
-<span class="sourceLineNo">618</span>      if (!conf.getBoolean("hbase.testing.nocluster", false)) {<a name="line.618"></a>
-<span class="sourceLineNo">619</span>        // Open connection to zookeeper and set primary watcher<a name="line.619"></a>
-<span class="sourceLineNo">620</span>        zooKeeper = new ZKWatcher(conf, getProcessName() + ":" +<a name="line.620"></a>
-<span class="sourceLineNo">621</span>          rpcServices.isa.getPort(), this, canCreateBaseZNode());<a name="line.621"></a>
-<span class="sourceLineNo">622</span>        // If no master in cluster, skip trying to track one or look for a cluster status.<a name="line.622"></a>
-<span class="sourceLineNo">623</span>        if (!this.masterless) {<a name="line.623"></a>
-<span class="sourceLineNo">624</span>          this.csm = new ZkCoordinatedStateManager(this);<a name="line.624"></a>
-<span class="sourceLineNo">625</span><a name="line.625"></a>
-<span class="sourceLineNo">626</span>          masterAddressTracker = new MasterAddressTracker(getZooKeeper(), this);<a name="line.626"></a>
-<span class="sourceLineNo">627</span>          masterAddressTracker.start();<a name="line.627"></a>
-<span class="sourceLineNo">628</span><a name="line.628"></a>
-<span class="sourceLineNo">629</span>          clusterStatusTracker = new ClusterStatusTracker(zooKeeper, this);<a name="line.629"></a>
-<span class="sourceLineNo">630</span>          clusterStatusTracker.start();<a name="line.630"></a>
-<span class="sourceLineNo">631</span>        } else {<a name="line.631"></a>
-<span class="sourceLineNo">632</span>          masterAddressTracker = null;<a name="line.632"></a>
-<span class="sourceLineNo">633</span>          clusterStatusTracker = null;<a name="line.633"></a>
-<span class="sourceLineNo">634</span>        }<a name="line.634"></a>
-<span class="sourceLineNo">635</span>      } else {<a name="line.635"></a>
-<span class="sourceLineNo">636</span>        zooKeeper = null;<a name="line.636"></a>
-<span class="sourceLineNo">637</span>        masterAddressTracker = null;<a name="line.637"></a>
-<span class="sourceLineNo">638</span>        clusterStatusTracker = null;<a name="line.638"></a>
-<span class="sourceLineNo">639</span>      }<a name="line.639"></a>
-<span class="sourceLineNo">640</span>      this.rpcServices.start(zooKeeper);<a name="line.640"></a>
-<span class="sourceLineNo">641</span>      // This violates 'no starting stuff in Constructor' but Master depends on the below chore<a name="line.641"></a>
-<span class="sourceLineNo">642</span>      // and executor being created and takes a different startup route. Lots of overlap between HRS<a name="line.642"></a>
-<span class="sourceLineNo">643</span>      // and M (An M IS A HRS now). Need to refactor so less duplication between M and its super<a name="line.643"></a>
-<span class="sourceLineNo">644</span>      // Master expects Constructor to put up web servers. Ugh.<a name="line.644"></a>
-<span class="sourceLineNo">645</span>      // class HRS. TODO.<a name="line.645"></a>
-<span class="sourceLineNo">646</span>      this.choreService = new ChoreService(getName(), true);<a name="line.646"></a>
-<span class="sourceLineNo">647</span>      this.executorService = new ExecutorService(getName());<a name="line.647"></a>
-<span class="sourceLineNo">648</span>      putUpWebUI();<a name="line.648"></a>
-<span class="sourceLineNo">649</span>    } catch (Throwable t) {<a name="line.649"></a>
-<span class="sourceLineNo">650</span>      // Make sure we log the exception. HRegionServer is often started via reflection and the<a name="line.650"></a>
-<span class="sourceLineNo">651</span>      // cause of failed startup is lost.<a name="line.651"></a>
-<span class="sourceLineNo">652</span>      LOG.error("Failed construction RegionServer", t);<a name="line.652"></a>
-<span class="sourceLineNo">653</span>      throw t;<a name="line.653"></a>
-<span class="sourceLineNo">654</span>    }<a name="line.654"></a>
-<span class="sourceLineNo">655</span>  }<a name="line.655"></a>
-<span class="sourceLineNo">656</span><a name="line.656"></a>
-<span class="sourceLineNo">657</span>  // HMaster should override this method to load the specific config for master<a name="line.657"></a>
-<span class="sourceLineNo">658</span>  protected String getUseThisHostnameInstead(Configuration conf) throws IOException {<a name="line.658"></a>
-<span class="sourceLineNo">659</span>    String hostname = conf.get(RS_HOSTNAME_KEY);<a name="line.659"></a>
-<span class="sourceLineNo">660</span>    if (conf.getBoolean(RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY, false)) {<a name="line.660"></a>
-<span class="sourceLineNo">661</span>      if (!StringUtils.isBlank(hostname)) {<a name="line.661"></a>
-<span class="sourceLineNo">662</span>        String msg = RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY + " and " + RS_HOSTNAME_KEY +<a name="line.662"></a>
-<span class="sourceLineNo">663</span>          " are mutually exclusive. Do not set " + RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY +<a name="line.663"></a>
-<span class="sourceLineNo">664</span>          " to true while " + RS_HOSTNAME_KEY + " is used";<a name="line.664"></a>
-<span class="sourceLineNo">665</span>        throw new IOException(msg);<a name="line.665"></a>
-<span class="sourceLineNo">666</span>      } else {<a name="line.666"></a>
-<span class="sourceLineNo">667</span>        return rpcServices.isa.getHostName();<a name="line.667"></a>
-<span class="sourceLineNo">668</span>      }<a name="line.668"></a>
-<span class="sourceLineNo">669</span>    } else {<a name="line.669"></a>
-<span class="sourceLineNo">670</span>      return hostname;<a name="line.670"></a>
-<span class="sourceLineNo">671</span>    }<a name="line.671"></a>
-<span class="sourceLineNo">672</span>  }<a name="line.672"></a>
-<span class="sourceLineNo">673</span><a name="line.673"></a>
-<span class="sourceLineNo">674</span>  /**<a name="line.674"></a>
-<span class="sourceLineNo">675</span>   * If running on Windows, do windows-specific setup.<a name="line.675"></a>
-<span class="sourceLineNo">676</span>   */<a name="line.676"></a>
-<span class="sourceLineNo">677</span>  private static void setupWindows(final Configuration conf, ConfigurationManager cm) {<a name="line.677"></a>
-<span class="sourceLineNo">678</span>    if (!SystemUtils.IS_OS_WINDOWS) {<a name="line.678"></a>
-<span class="sourceLineNo">679</span>      Signal.handle(new Signal("HUP"), new SignalHandler() {<a name="line.679"></a>
-<span class="sourceLineNo">680</span>        @Override<a name="line.680"></a>
-<span class="sourceLineNo">681</span>        public void handle(Signal signal) {<a name="line.681"></a>
-<span class="sourceLineNo">682</span>          conf.reloadConfiguration();<a name="line.682"></a>
-<span class="sourceLineNo">683</span>          cm.notifyAllObservers(conf);<a name="line.683"></a>
-<span class="sourceLineNo">684</span>        }<a name="line.684"></a>
-<span class="sourceLineNo">685</span>      });<a name="line.685"></a>
-<span class="sourceLineNo">686</span>    }<a name="line.686"></a>
-<span class="sourceLineNo">687</span>  }<a name="line.687"></a>
-<span class="sourceLineNo">688</span><a name="line.688"></a>
-<span class="sourceLineNo">689</span>  private static NettyEventLoopGroupConfig setupNetty(Configuration conf) {<a name="line.689"></a>
-<span class="sourceLineNo">690</span>    // Initialize netty event loop group at start as we may use it for rpc server, rpc client &amp; WAL.<a name="line.690"></a>
-<span class="sourceLineNo">691</span>    NettyEventLoopGroupConfig nelgc =<a name="line.691"></a>
-<span class="sourceLineNo">692</span>      new NettyEventLoopGroupConfig(conf, "RS-EventLoopGroup");<a name="line.692"></a>
-<span class="sourceLineNo">693</span>    NettyRpcClientConfigHelper.setEventLoopConfig(conf, nelgc.group(), nelgc.clientChannelClass());<a name="line.693"></a>
-<span class="sourceLineNo">694</span>    NettyAsyncFSWALConfigHelper.setEventLoopConfig(conf, nelgc.group(), nelgc.clientChannelClass());<a name="line.694"></a>
-<span class="sourceLineNo">695</span>    return nelgc;<a name="line.695"></a>
-<span class="sourceLineNo">696</span>  }<a name="line.696"></a>
-<span class="sourceLineNo">697</span><a name="line.697"></a>
-<span class="sourceLineNo">698</span>  private void initializeFileSystem() throws IOException {<a name="line.698"></a>
-<span class="sourceLineNo">699</span>    // Get fs instance used by this RS.  Do we use checksum verification in the hbase? If hbase<a name="line.699"></a>
-<span class="sourceLineNo">700</span>    // checksum verification enabled, then automatically switch off hdfs checksum verification.<a name="line.700"></a>
-<span class="sourceLineNo">701</span>    boolean useHBaseChecksum = conf.getBoolean(HConstants.HBASE_CHECKSUM_VERIFICATION, true);<a name="line.701"></a>
-<span class="sourceLineNo">702</span>    FSUtils.setFsDefault(this.conf, FSUtils.getWALRootDir(this.conf));<a name="line.702"></a>
-<span class="sourceLineNo">703</span>    this.walFs = new HFileSystem(this.conf, useHBaseChecksum);<a name="line.703"></a>
-<span class="sourceLineNo">704</span>    this.walRootDir = FSUtils.getWALRootDir(this.conf);<a name="line.704"></a>
-<span class="sourceLineNo">705</span>    // Set 'fs.defaultFS' to match the filesystem on hbase.rootdir else<a name="line.705"></a>
-<span class="sourceLineNo">706</span>    // underlying hadoop hdfs accessors will be going against wrong filesystem<a name="line.706"></a>
-<span class="sourceLineNo">707</span>    // (unless all is set to defaults).<a name="line.707"></a>
-<span class="sourceLineNo">708</span>    FSUtils.setFsDefault(this.conf, FSUtils.getRootDir(this.conf));<a name="line.708"></a>
-<span class="sourceLineNo">709</span>    this.fs = new HFileSystem(this.conf, useHBaseChecksum);<a name="line.709"></a>
-<span class="sourceLineNo">710</span>    this.rootDir = FSUtils.getRootDir(this.conf);<a name="line.710"></a>
-<span class="sourceLineNo">711</span>    this.tableDescriptors = getFsTableDescriptors();<a name="line.711"></a>
-<span class="sourceLineNo">712</span>  }<a name="line.712"></a>
-<span class="sourceLineNo">713</span><a name="line.713"></a>
-<span class="sourceLineNo">714</span>  protected TableDescriptors getFsTableDescriptors() throws IOException {<a name="line.714"></a>
-<span class="sourceLineNo">715</span>    return new FSTableDescriptors(this.conf,<a name="line.715"></a>
-<span class="sourceLineNo">716</span>      this.fs, this.rootDir, !canUpdateTableDescriptor(), false, getMetaTableObserver());<a name="line.716"></a>
+<span class="sourceLineNo">602</span>      // no need to instantiate global block cache when master not carry table<a name="line.602"></a>
+<span class="sourceLineNo">603</span>      if (!isMasterNotCarryTable) {<a name="line.603"></a>
+<span class="sourceLineNo">604</span>        CacheConfig.instantiateBlockCache(conf);<a name="line.604"></a>
+<span class="sourceLineNo">605</span>      }<a name="line.605"></a>
+<span class="sourceLineNo">606</span>      cacheConfig = new CacheConfig(conf);<a name="line.606"></a>
+<span class="sourceLineNo">607</span>      mobCacheConfig = new MobCacheConfig(conf);<a name="line.607"></a>
+<span class="sourceLineNo">608</span><a name="line.608"></a>
+<span class="sourceLineNo">609</span>      uncaughtExceptionHandler = new UncaughtExceptionHandler() {<a name="line.609"></a>
+<span class="sourceLineNo">610</span>        @Override<a name="line.610"></a>
+<span class="sourceLineNo">611</span>        public void uncaughtException(Thread t, Throwable e) {<a name="line.611"></a>
+<span class="sourceLineNo">612</span>          abort("Uncaught exception in executorService thread " + t.getName(), e);<a name="line.612"></a>
+<span class="sourceLineNo">613</span>        }<a name="line.613"></a>
+<span class="sourceLineNo">614</span>      };<a name="line.614"></a>
+<span class="sourceLineNo">615</span><a name="line.615"></a>
+<span class="sourceLineNo">616</span>      initializeFileSystem();<a name="line.616"></a>
+<span class="sourceLineNo">617</span>      spanReceiverHost = SpanReceiverHost.getInstance(getConfiguration());<a name="line.617"></a>
+<span class="sourceLineNo">618</span><a name="line.618"></a>
+<span class="sourceLineNo">619</span>      this.configurationManager = new ConfigurationManager();<a name="line.619"></a>
+<span class="sourceLineNo">620</span>      setupWindows(getConfiguration(), getConfigurationManager());<a name="line.620"></a>
+<span class="sourceLineNo">621</span><a name="line.621"></a>
+<span class="sourceLineNo">622</span>      // Some unit tests don't need a cluster, so no zookeeper at all<a name="line.622"></a>
+<span class="sourceLineNo">623</span>      if (!conf.getBoolean("hbase.testing.nocluster", false)) {<a name="line.623"></a>
+<span class="sourceLineNo">624</span>        // Open connection to zookeeper and set primary watcher<a name="line.624"></a>
+<span class="sourceLineNo">625</span>        zooKeeper = new ZKWatcher(conf, getProcessName() + ":" +<a name="line.625"></a>
+<span class="sourceLineNo">626</span>          rpcServices.isa.getPort(), this, canCreateBaseZNode());<a name="line.626"></a>
+<span class="sourceLineNo">627</span>        // If no master in cluster, skip trying to track one or look for a cluster status.<a name="line.627"></a>
+<span class="sourceLineNo">628</span>        if (!this.masterless) {<a name="line.628"></a>
+<span class="sourceLineNo">629</span>          this.csm = new ZkCoordinatedStateManager(this);<a name="line.629"></a>
+<span class="sourceLineNo">630</span><a name="line.630"></a>
+<span class="sourceLineNo">631</span>          masterAddressTracker = new MasterAddressTracker(getZooKeeper(), this);<a name="line.631"></a>
+<span class="sourceLineNo">632</span>          masterAddressTracker.start();<a name="line.632"></a>
+<span class="sourceLineNo">633</span><a name="line.633"></a>
+<span class="sourceLineNo">634</span>          clusterStatusTracker = new ClusterStatusTracker(zooKeeper, this);<a name="line.634"></a>
+<span class="sourceLineNo">635</span>          clusterStatusTracker.start();<a name="line.635"></a>
+<span class="sourceLineNo">636</span>        } else {<a name="line.636"></a>
+<span class="sourceLineNo">637</span>          masterAddressTracker = null;<a name="line.637"></a>
+<span class="sourceLineNo">638</span>          clusterStatusTracker = null;<a name="line.638"></a>
+<span class="sourceLineNo">639</span>        }<a name="line.639"></a>
+<span class="sourceLineNo">640</span>      } else {<a name="line.640"></a>
+<span class="sourceLineNo">641</span>        zooKeeper = null;<a name="line.641"></a>
+<span class="sourceLineNo">642</span>        masterAddressTracker = null;<a name="line.642"></a>
+<span class="sourceLineNo">643</span>        clusterStatusTracker = null;<a name="line.643"></a>
+<span class="sourceLineNo">644</span>      }<a name="line.644"></a>
+<span class="sourceLineNo">645</span>      this.rpcServices.start(zooKeeper);<a name="line.645"></a>
+<span class="sourceLineNo">646</span>      // This violates 'no starting stuff in Constructor' but Master depends on the below chore<a name="line.646"></a>
+<span class="sourceLineNo">647</span>      // and executor being created and takes a different startup route. Lots of overlap between HRS<a name="line.647"></a>
+<span class="sourceLineNo">648</span>      // and M (An M IS A HRS now). Need to refactor so less duplication between M and its super<a name="line.648"></a>
+<span class="sourceLineNo">649</span>      // Master expects Constructor to put up web servers. Ugh.<a name="line.649"></a>
+<span class="sourceLineNo">650</span>      // class HRS. TODO.<a name="line.650"></a>
+<span class="sourceLineNo">651</span>      this.choreService = new ChoreService(getName(), true);<a name="line.651"></a>
+<span class="sourceLineNo">652</span>      this.executorService = new ExecutorService(getName());<a name="line.652"></a>
+<span class="sourceLineNo">653</span>      putUpWebUI();<a name="line.653"></a>
+<span class="sourceLineNo">654</span>    } catch (Throwable t) {<a name="line.654"></a>
+<span class="sourceLineNo">655</span>      // Make sure we log the exception. HRegionServer is often started via reflection and the<a name="line.655"></a>
+<span class="sourceLineNo">656</span>      // cause of failed startup is lost.<a name="line.656"></a>
+<span class="sourceLineNo">657</span>      LOG.error("Failed construction RegionServer", t);<a name="line.657"></a>
+<span class="sourceLineNo">658</span>      throw t;<a name="line.658"></a>
+<span class="sourceLineNo">659</span>    }<a name="line.659"></a>
+<span class="sourceLineNo">660</span>  }<a name="line.660"></a>
+<span class="sourceLineNo">661</span><a name="line.661"></a>
+<span class="sourceLineNo">662</span>  // HMaster should override this method to load the specific config for master<a name="line.662"></a>
+<span class="sourceLineNo">663</span>  protected String getUseThisHostnameInstead(Configuration conf) throws IOException {<a name="line.663"></a>
+<span class="sourceLineNo">664</span>    String hostname = conf.get(RS_HOSTNAME_KEY);<a name="line.664"></a>
+<span class="sourceLineNo">665</span>    if (conf.getBoolean(RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY, false)) {<a name="line.665"></a>
+<span class="sourceLineNo">666</span>      if (!StringUtils.isBlank(hostname)) {<a name="line.666"></a>
+<span class="sourceLineNo">667</span>        String msg = RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY + " and " + RS_HOSTNAME_KEY +<a name="line.667"></a>
+<span class="sourceLineNo">668</span>          " are mutually exclusive. Do not set " + RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY +<a name="line.668"></a>
+<span class="sourceLineNo">669</span>          " to true while " + RS_HOSTNAME_KEY + " is used";<a name="line.669"></a>
+<span class="sourceLineNo">670</span>        throw new IOException(msg);<a name="line.670"></a>
+<span class="sourceLineNo">671</span>      } else {<a name="line.671"></a>
+<span class="sourceLineNo">672</span>        return rpcServices.isa.getHostName();<a name="line.672"></a>
+<span class="sourceLineNo">673</span>      }<a name="line.673"></a>
+<span class="sourceLineNo">674</span>    } else {<a name="line.674"></a>
+<span class="sourceLineNo">675</span>      return hostname;<a name="line.675"></a>
+<span class="sourceLineNo">676</span>    }<a name="line.676"></a>
+<span class="sourceLineNo">677</span>  }<a name="line.677"></a>
+<span class="sourceLineNo">678</span><a name="line.678"></a>
+<span class="sourceLineNo">679</span>  /**<a name="line.679"></a>
+<span class="sourceLineNo">680</span>   * If running on Windows, do windows-specific setup.<a name="line.680"></a>
+<span class="sourceLineNo">681</span>   */<a name="line.681"></a>
+<span class="sourceLineNo">682</span>  private static void setupWindows(final Configuration conf, ConfigurationManager cm) {<a name="line.682"></a>
+<span class="sourceLineNo">683</span>    if (!SystemUtils.IS_OS_WINDOWS) {<a name="line.683"></a>
+<span class="sourceLineNo">684</span>      Signal.handle(new Signal("HUP"), new SignalHandler() {<a name="line.684"></a>
+<span class="sourceLineNo">685</span>        @Override<a name="line.685"></a>
+<span class="sourceLineNo">686</span>        public void handle(Signal signal) {<a name="line.686"></a>
+<span class="sourceLineNo">687</span>          conf.reloadConfiguration();<a name="line.687"></a>
+<span class="sourceLineNo">688</span>          cm.notifyAllObservers(conf);<a name="line.688"></a>
+<span class="sourceLineNo">689</span>        }<a name="line.689"></a>
+<span class="sourceLineNo">690</span>      });<a name="line.690"></a>
+<span class="sourceLineNo">691</span>    }<a name="line.691"></a>
+<span class="sourceLineNo">692</span>  }<a name="line.692"></a>
+<span class="sourceLineNo">693</span><a name="line.693"></a>
+<span class="sourceLineNo">694</span>  private static NettyEventLoopGroupConfig setupNetty(Configuration conf) {<a name="line.694"></a>
+<span class="sourceLineNo">695</span>    // Initialize netty event loop group at start as we may use it for rpc server, rpc client &amp; WAL.<a name="line.695"></a>
+<span class="sourceLineNo">696</span>    NettyEventLoopGroupConfig nelgc =<a name="line.696"></a>
+<span class="sourceLineNo">697</span>      new NettyEventLoopGroupConfig(conf, "RS-EventLoopGroup");<a name="line.697"></a>
+<span class="sourceLineNo">698</span>    NettyRpcClientConfigHelper.setEventLoopConfig(conf, nelgc.group(), nelgc.clientChannelClass());<a name="line.698"></a>
+<span class="sourceLineNo">699</span>    NettyAsyncFSWALConfigHelper.setEventLoopConfig(conf, nelgc.group(), nelgc.clientChannelClass());<a name="line.699"></a>
+<span class="sourceLineNo">700</span>    return nelgc;<a name="line.700"></a>
+<span class="sourceLineNo">701</span>  }<a name="line.701"></a>
+<span class="sourceLineNo">702</span><a name="line.702"></a>
+<span class="sourceLineNo">703</span>  private void initializeFileSystem() throws IOException {<a name="line.703"></a>
+<span class="sourceLineNo">704</span>    // Get fs instance used by this RS.  Do we use checksum verification in the hbase? If hbase<a name="line.704"></a>
+<span class="sourceLineNo">705</span>    // checksum verification enabled, then automatically switch off hdfs checksum verification.<a name="line.705"></a>
+<span class="sourceLineNo">706</span>    boolean useHBaseChecksum = conf.getBoolean(HConstants.HBASE_CHECKSUM_VERIFICATION, true);<a name="line.706"></a>
+<span class="sourceLineNo">707</span>    FSUtils.setFsDefault(this.conf, FSUtils.getWALRootDir(this.conf));<a name="line.707"></a>
+<span class="sourceLineNo">708</span>    this.walFs = new HFileSystem(this.conf, useHBaseChecksum);<a name="line.708"></a>
+<span class="sourceLineNo">709</span>    this.walRootDir = FSUtils.getWALRootDir(this.conf);<a name="line.709"></a>
+<span class="sourceLineNo">710</span>    // Set 'fs.defaultFS' to match the filesystem on hbase.rootdir else<a name="line.710"></a>
+<span class="sourceLineNo">711</span>    // underlying hadoop hdfs accessors will be going against wrong filesystem<a name="line.711"></a>
+<span class="sourceLineNo">712</span>    // (unless all is set to defaults).<a name="line.712"></a>
+<span class="sourceLineNo">713</span>    FSUtils.setFsDefault(this.conf, FSUtils.getRootDir(this.conf));<a name="line.713"></a>
+<span class="sourceLineNo">714</span>    this.fs = new HFileSystem(this.conf, useHBaseChecksum);<a name="line.714"></a>
+<span class="sourceLineNo">715</span>    this.rootDir = FSUtils.getRootDir(this.conf);<a name="line.715"></a>
+<span class="sourceLineNo">716</span>    this.tableDescriptors = getFsTableDescriptors();<a name="line.716"></a>
 <span class="sourceLineNo">717</span>  }<a name="line.717"></a>
 <span class="sourceLineNo">718</span><a name="line.718"></a>
-<span class="sourceLineNo">719</span>  protected Function&lt;TableDescriptorBuilder, TableDescriptorBuilder&gt; getMetaTableObserver() {<a name="line.719"></a>
-<span class="sourceLineNo">720</span>    return null;<a name="line.720"></a>
-<span class="sourceLineNo">721</span>  }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>  protected void login(UserProvider user, String host) throws IOException {<a name="line.723"></a>
-<span class="sourceLineNo">724</span>    user.login("hbase.regionserver.keytab.file",<a name="line.724"></a>
-<span class="sourceLineNo">725</span>      "hbase.regionserver.kerberos.principal", host);<a name="line.725"></a>
+<span class="sourceLineNo">719</span>  protected TableDescriptors getFsTableDescriptors() throws IOException {<a name="line.719"></a>
+<span class="sourceLineNo">720</span>    return new FSTableDescriptors(this.conf,<a name="line.720"></a>
+<span class="sourceLineNo">721</span>      this.fs, this.rootDir, !canUpdateTableDescriptor(), false, getMetaTableObserver());<a name="line.721"></a>
+<span class="sourceLineNo">722</span>  }<a name="line.722"></a>
+<span class="sourceLineNo">723</span><a name="line.723"></a>
+<span class="sourceLineNo">724</span>  protected Function&lt;TableDescriptorBuilder, TableDescriptorBuilder&gt; getMetaTableObserver() {<a name="line.724"></a>
+<span class="sourceLineNo">725</span>    return null;<a name="line.725"></a>
 <span class="sourceLineNo">726</span>  }<a name="line.726"></a>
 <span class="sourceLineNo">727</span><a name="line.727"></a>
-<span class="sourceLineNo">728</span><a name="line.728"></a>
-<span class="sourceLineNo">729</span>  /**<a name="line.729"></a>
-<span class="sourceLineNo">730</span>   * Wait for an active Master.<a name="line.730"></a>
-<span class="sourceLineNo">731</span>   * See override in Master superclass for how it is used.<a name="line.731"></a>
-<span class="sourceLineNo">732</span>   */<a name="line.732"></a>
-<span class="sourceLineNo">733</span>  protected void waitForMasterActive() {}<a name="line.733"></a>
-<span class="sourceLineNo">734</span><a name="line.734"></a>
-<span class="sourceLineNo">735</span>  protected String getProcessName() {<a name="line.735"></a>
-<span class="sourceLineNo">736</span>    return REGIONSERVER;<a name="line.736"></a>
-<span class="sourceLineNo">737</span>  }<a name="line.737"></a>
-<span class="sourceLineNo">738</span><a name="line.738"></a>
-<span class="sourceLineNo">739</span>  protected boolean canCreateBaseZNode() {<a name="line.739"></a>
-<span class="sourceLineNo">740</span>    return this.masterless;<a name="line.740"></a>
-<span class="sourceLineNo">741</span>  }<a name="line.741"></a>
-<span class="sourceLineNo">742</span><a name="line.742"></a>
-<span class="sourceLineNo">743</span>  protected boolean canUpdateTableDescriptor() {<a name="line.743"></a>
-<span class="sourceLineNo">744</span>    return false;<a name="line.744"></a>
-<span class="sourceLineNo">745</span>  }<a name="line.745"></a>
-<span class="sourceLineNo">746</span><a name="line.746"></a>
-<span class="sourceLineNo">747</span>  protected RSRpcServices createRpcServices() throws IOException {<a name="line.747"></a>
-<span class="sourceLineNo">748</span>    return new RSRpcServices(this);<a name="line.748"></a>
-<span class="sourceLineNo">749</span>  }<a name="line.749"></a>
-<span class="sourceLineNo">750</span><a name="line.750"></a>
-<span class="sourceLineNo">751</span>  protected void configureInfoServer() {<a name="line.751"></a>
-<span class="sourceLineNo">752</span>    infoServer.addServlet("rs-status", "/rs-status", RSStatusServlet.class);<a name="line.752"></a>
-<span class="sourceLineNo">753</span>    infoServer.setAttribute(REGIONSERVER, this);<a name="line.753"></a>
+<span class="sourceLineNo">728</span>  protected void login(UserProvider user, String host) throws IOException {<a name="line.728"></a>
+<span class="sourceLineNo">729</span>    user.login("hbase.regionserver.keytab.file",<a name="line.729"></a>
+<span class="sourceLineNo">730</span>      "hbase.regionserver.kerberos.principal", host);<a name="line.730"></a>
+<span class="sourceLineNo">731</span>  }<a name="line.731"></a>
+<span class="sourceLineNo">732</span><a name="line.732"></a>
+<span class="sourceLineNo">733</span><a name="line.733"></a>
+<span class="sourceLineNo">734</span>  /**<a name="line.734"></a>
+<span class="sourceLineNo">735</span>   * Wait for an active Master.<a name="line.735"></a>
+<span class="sourceLineNo">736</span>   * See override in Master superclass for how it is used.<a name="line.736"></a>
+<span class="sourceLineNo">737</span>   */<a name="line.737"></a>
+<span class="sourceLineNo">738</span>  protected void waitForMasterActive() {}<a name="line.738"></a>
+<span class="sourceLineNo">739</span><a name="line.739"></a>
+<span class="sourceLineNo">740</span>  protected String getProcessName() {<a name="line.740"></a>
+<span class="sourceLineNo">741</span>    return REGIONSERVER;<a name="line.741"></a>
+<span class="sourceLineNo">742</span>  }<a name="line.742"></a>
+<span class="sourceLineNo">743</span><a name="line.743"></a>
+<span class="sourceLineNo">744</span>  protected boolean canCreateBaseZNode() {<a name="line.744"></a>
+<span class="sourceLineNo">745</span>    return this.masterless;<a name="line.745"></a>
+<span class="sourceLineNo">746</span>  }<a name="line.746"></a>
+<span class="sourceLineNo">747</span><a name="line.747"></a>
+<span class="sourceLineNo">748</span>  protected boolean canUpdateTableDescriptor() {<a name="line.748"></a>
+<span class="sourceLineNo">749</span>    return false;<a name="line.749"></a>
+<span class="sourceLineNo">750</span>  }<a name="line.750"></a>
+<span class="sourceLineNo">751</span><a name="line.751"></a>
+<span class="sourceLineNo">752</span>  protected RSRpcServices createRpcServices() throws IOException {<a name="line.752"></a>
+<span class="sourceLineNo">753</span>    return new RSRpcServices(this);<a name="line.753"></a>
 <span class="sourceLineNo">754</span>  }<a name="line.754"></a>
 <span class="sourceLineNo">755</span><a name="line.755"></a>
-<span class="sourceLineNo">756</span>  protected Class&lt;? extends HttpServlet&gt; getDumpServlet() {<a name="line.756"></a>
-<span class="sourceLineNo">757</span>    return RSDumpServlet.class;<a name="line.757"></a>
-<span class="sourceLineNo">758</span>  }<a name="line.758"></a>
-<span class="sourceLineNo">759</span><a name="line.759"></a>
-<span class="sourceLineNo">760</span>  @Override<a name="line.760"></a>
-<span class="sourceLineNo">761</span>  public boolean registerService(com.google.protobuf.Service instance) {<a name="line.761"></a>
-<span class="sourceLineNo">762</span>    /*<a name="line.762"></a>
-<span class="sourceLineNo">763</span>     * No stacking of instances is allowed for a single executorService name<a name="line.763"></a>
-<span class="sourceLineNo">764</span>     */<a name="line.764"></a>
-<span class="sourceLineNo">765</span>    com.google.protobuf.Descriptors.ServiceDescriptor serviceDesc =<a name="line.765"></a>
-<span class="sourceLineNo">766</span>        instance.getDescriptorForType();<a name="line.766"></a>
-<span class="sourceLineNo">767</span>    String serviceName = CoprocessorRpcUtils.getServiceName(serviceDesc);<a name="line.767"></a>
-<span class="sourceLineNo">768</span>    if (coprocessorServiceHandlers.containsKey(serviceName)) {<a name="line.768"></a>
-<span class="sourceLineNo">769</span>      LOG.error("Coprocessor executorService " + serviceName<a name="line.769"></a>
-<span class="sourceLineNo">770</span>          + " already registered, rejecting request from " + instance);<a name="line.770"></a>
-<span class="sourceLineNo">771</span>      return false;<a name="line.771"></a>
-<span class="sourceLineNo">772</span>    }<a name="line.772"></a>
-<span class="sourceLineNo">773</span><a name="line.773"></a>
-<span class="sourceLineNo">774</span>    coprocessorServiceHandlers.put(serviceName, instance);<a name="line.774"></a>
-<span class="sourceLineNo">775</span>    if (LOG.isDebugEnabled()) {<a name="line.775"></a>
-<span class="sourceLineNo">776</span>      LOG.debug("Registered regionserver coprocessor executorService: executorService=" + serviceName);<a name="line.776"></a>
+<span class="sourceLineNo">756</span>  protected void configureInfoServer() {<a name="line.756"></a>
+<span class="sourceLineNo">757</span>    infoServer.addServlet("rs-status", "/rs-status", RSStatusServlet.class);<a name="line.757"></a>
+<span class="sourceLineNo">758</span>    infoServer.setAttribute(REGIONSERVER, this);<a name="line.758"></a>
+<span class="sourceLineNo">759</span>  }<a name="line.759"></a>
+<span class="sourceLineNo">760</span><a name="line.760"></a>
+<span class="sourceLineNo">761</span>  protected Class&lt;? extends HttpServlet&gt; getDumpServlet() {<a name="line.761"></a>
+<span class="sourceLineNo">762</span>    return RSDumpServlet.class;<a name="line.762"></a>
+<span class="sourceLineNo">763</span>  }<a name="line.763"></a>
+<span class="sourceLineNo">764</span><a name="line.764"></a>
+<span class="sourceLineNo">765</span>  @Override<a name="line.765"></a>
+<span class="sourceLineNo">766</span>  public boolean registerService(com.google.protobuf.Service instance) {<a name="line.766"></a>
+<span class="sourceLineNo">767</span>    /*<a name="line.767"></a>
+<span class="sourceLineNo">768</span>     * No stacking of instances is allowed for a single executorService name<a name="line.768"></a>
+<span class="sourceLineNo">769</span>     */<a name="line.769"></a>
+<span class="sourceLineNo">770</span>    com.google.protobuf.Descriptors.ServiceDescriptor serviceDesc =<a name="line.770"></a>
+<span class="sourceLineNo">771</span>        instance.getDescriptorForType();<a name="line.771"></a>
+<span class="sourceLineNo">772</span>    String serviceName = CoprocessorRpcUtils.getServiceName(serviceDesc);<a name="line.772"></a>
+<span class="sourceLineNo">773</span>    if (coprocessorServiceHandlers.containsKey(serviceName)) {<a name="line.773"></a>
+<span class="sourceLineNo">774</span>      LOG.error("Coprocessor executorService " + serviceName<a name="line.774"></a>
+<span class="sourceLineNo">775</span>          + " already registered, rejecting request from " + instance);<a name="line.775"></a>
+<span class="sourceLineNo">776</span>      return false;<a name="line.776"></a>
 <span class="sourceLineNo">777</span>    }<a name="line.777"></a>
-<span class="sourceLineNo">778</span>    return true;<a name="line.778"></a>
-<span class="sourceLineNo">779</span>  }<a name="line.779"></a>
-<span class="sourceLineNo">780</span><a name="line.780"></a>
-<span class="sourceLineNo">781</span>  /**<a name="line.781"></a>
-<span class="sourceLineNo">782</span>   * Create a 'smarter' Connection, one that is capable of by-passing RPC if the request is to the<a name="line.782"></a>
-<span class="sourceLineNo">783</span>   * local server; i.e. a short-circuit Connection. Safe to use going to local or remote server.<a name="line.783"></a>
-<span class="sourceLineNo">784</span>   */<a name="line.784"></a>
-<span class="sourceLineNo">785</span>  private ClusterConnection createClusterConnection() throws IOException {<a name="line.785"></a>
-<span class="sourceLineNo">786</span>    Configuration conf = this.conf;<a name="line.786"></a>
-<span class="sourceLineNo">787</span>    if (conf.get(HConstants.CLIENT_ZOOKEEPER_QUORUM) != null) {<a name="line.787"></a>
-<span class="sourceLineNo">788</span>      // Use server ZK cluster for server-issued connections, so we clone<a name="line.788"></a>
-<span class="sourceLineNo">789</span>      // the conf and unset the client ZK related properties<a name="line.789"></a>
-<span class="sourceLineNo">790</span>      conf = new Configuration(this.conf);<a name="line.790"></a>
-<span class="sourceLineNo">791</span>      conf.unset(HConstants.CLIENT_ZOOKEEPER_QUORUM);<a name="line.791"></a>
-<span class="sourceLineNo">792</span>    }<a name="line.792"></a>
-<span class="sourceLineNo">793</span>    // Create a cluster connection that when appropriate, can short-circuit and go directly to the<a name="line.793"></a>
-<span class="sourceLineNo">794</span>    // local server if the request is to the local server bypassing RPC. Can be used for both local<a name="line.794"></a>
-<span class="sourceLineNo">795</span>    // and remote invocations.<a name="line.795"></a>
-<span class="sourceLineNo">796</span>    ClusterConnection conn = ConnectionUtils.createShortCircuitConnection(conf, null,<a name="line.796"></a>
-<span class="sourceLineNo">797</span>      userProvider.getCurrent(), serverName, rpcServices, rpcServices);<a name="line.797"></a>
-<span class="sourceLineNo">798</span>    // This is used to initialize the batch thread pool inside the connection implementation.<a name="line.798"></a>
-<span class="sourceLineNo">799</span>    // When deploy a fresh cluster, we may first use the cluster connection in InitMetaProcedure,<a name="line.799"></a>
-<span class="sourceLineNo">800</span>    // which will be executed inside the PEWorker, and then the batch thread pool will inherit the<a name="line.800"></a>
-<span class="sourceLineNo">801</span>    // thread group of PEWorker, which will be destroy when shutting down the ProcedureExecutor. It<a name="line.801"></a>
-<span class="sourceLineNo">802</span>    // will cause lots of procedure related UTs to fail, so here let's initialize it first, no harm.<a name="line.802"></a>
-<span class="sourceLineNo">803</span>    conn.getTable(TableName.META_TABLE_NAME).close();<a name="line.803"></a>
-<span class="sourceLineNo">804</span>    return conn;<a name="line.804"></a>
-<span class="sourceLineNo">805</span>  }<a name="line.805"></a>
-<span class="sourceLineNo">806</span><a name="line.806"></a>
-<span class="sourceLineNo">807</span>  /**<a name="line.807"></a>
-<span class="sourceLineNo">808</span>   * Run test on configured codecs to make sure supporting libs are in place.<a name="line.808"></a>
-<span class="sourceLineNo">809</span>   * @param c<a name="line.809"></a>
-<span class="sourceLineNo">810</span>   * @throws IOException<a name="line.810"></a>
-<span class="sourceLineNo">811</span>   */<a name="line.811"></a>
-<span class="sourceLineNo">812</span>  private static void checkCodecs(final Configuration c) throws IOException {<a name="line.812"></a>
-<span class="sourceLineNo">813</span>    // check to see if the codec list is available:<a name="line.813"></a>
-<span class="sourceLineNo">814</span>    String [] codecs = c.getStrings("hbase.regionserver.codecs", (String[])null);<a name="line.814"></a>
-<span class="sourceLineNo">815</span>    if (codecs == null) return;<a name="line.815"></a>
-<span class="sourceLineNo">816</span>    for (String codec : codecs) {<a name="line.816"></a>
-<span class="sourceLineNo">817</span>      if (!CompressionTest.testCompression(codec)) {<a name="line.817"></a>
-<span class="sourceLineNo">818</span>        throw new IOException("Compression codec " + codec +<a name="line.818"></a>
-<span class="sourceLineNo">819</span>          " not supported, aborting RS construction");<a name="line.819"></a>
-<span class="sourceLineNo">820</span>      }<a name="line.820"></a>
-<span class="sourceLineNo">821</span>    }<a name="line.821"></a>
-<span class="sourceLineNo">822</span>  }<a name="line.822"></a>
-<span class="sourceLineNo">823</span><a name="line.823"></a>
-<span class="sourceLineNo">824</span>  public String getClusterId() {<a name="line.824"></a>
-<span class="sourceLineNo">825</span>    return this.clusterId;<a name="line.825"></a>
-<span class="sourceLineNo">826</span>  }<a name="line.826"></a>
-<span class="sourceLineNo">827</span><a name="line.827"></a>
-<span class="sourceLineNo">828</span>  /**<a name="line.828"></a>
-<span class="sourceLineNo">829</span>   * Setup our cluster connection if not already initialized.<a name="line.829"></a>
-<span class="sourceLineNo">830</span>   */<a name="line.830"></a>
-<span class="sourceLineNo">831</span>  protected synchronized void setupClusterConnection() throws IOException {<a name="line.831"></a>
-<span class="sourceLineNo">832</span>    if (clusterConnection == null) {<a name="line.832"></a>
-<span class="sourceLineNo">833</span>      clusterConnection = createClusterConnection();<a name="line.833"></a>
-<span class="sourceLineNo">834</span>      metaTableLocator = new MetaTableLocator();<a name="line.834"></a>
-<span class="sourceLineNo">835</span>    }<a name="line.835"></a>
-<span class="sourceLineNo">836</span>  }<a name="line.836"></a>
-<span class="sourceLineNo">837</span><a name="line.837"></a>
-<span class="sourceLineNo">838</span>  /**<a name="line.838"></a>
-<span class="sourceLineNo">839</span>   * All initialization needed before we go register with Master.&lt;br&gt;<a name="line.839"></a>
-<span class="sourceLineNo">840</span>   * Do bare minimum. Do bulk of initializations AFTER we've connected to the Master.&lt;br&gt;<a name="line.840"></a>
-<span class="sourceLineNo">841</span>   * In here we just put up the RpcServer, setup Connection, and ZooKeeper.<a name="line.841"></a>
-<span class="sourceLineNo">842</span>   */<a name="line.842"></a>
-<span class="sourceLineNo">843</span>  private void preRegistrationInitialization() {<a name="line.843"></a>
-<span class="sourceLineNo">844</span>    try {<a name="line.844"></a>
-<span class="sourceLineNo">845</span>      initializeZooKeeper();<a name="line.845"></a>
-<span class="sourceLineNo">846</span>      setupClusterConnection();<a name="line.846"></a>
-<span class="sourceLineNo">847</span>      // Setup RPC client for master communication<a name="line.847"></a>
-<span class="sourceLineNo">848</span>      this.rpcClient = RpcClientFactory.createClient(conf, clusterId, new InetSocketAddress(<a name="line.848"></a>
-<span class="sourceLineNo">849</span>          this.rpcServices.isa.getAddress(), 0), clusterConnection.getConnectionMetrics());<a name="line.849"></a>
-<span class="sourceLineNo">850</span>    } catch (Throwable t) {<a name="line.850"></a>
-<span class="sourceLineNo">851</span>      // Call stop if error or process will stick around for ever since server<a name="line.851"></a>
-<span class="sourceLineNo">852</span>      // puts up non-daemon threads.<a name="line.852"></a>
-<span class="sourceLineNo">853</span>      this.rpcServices.stop();<a name="line.853"></a>
-<span class="sourceLineNo">854</span>      abort("Initialization of RS failed.  Hence aborting RS.", t);<a name="line.854"></a>
-<span class="sourceLineNo">855</span>    }<a name="line.855"></a>
-<span class="sourceLineNo">856</span>  }<a name="line.856"></a>
-<span class="sourceLineNo">857</span><a name="line.857"></a>
-<span class="sourceLineNo">858</span>  /**<a name="line.858"></a>
-<span class="sourceLineNo">859</span>   * Bring up connection to zk ensemble and then wait until a master for this cluster and then after<a name="line.859"></a>
-<span class="sourceLineNo">860</span>   * that, wait until cluster 'up' flag has been set. This is the order in which master does things.<a name="line.860"></a>
-<span class="sourceLineNo">861</span>   * &lt;p&gt;<a name="line.861"></a>
-<span class="sourceLineNo">862</span>   * Finally open long-living server short-circuit connection.<a name="line.862"></a>
-<span class="sourceLineNo">863</span>   */<a name="line.863"></a>
-<span class="sourceLineNo">864</span>  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="RV_RETURN_VALUE_IGNORED_BAD_PRACTICE",<a name="line.864"></a>
-<span class="sourceLineNo">865</span>    justification="cluster Id znode read would give us correct response")<a name="line.865"></a>
-<span class="sourceLineNo">866</span>  private void initializeZooKeeper() throws IOException, InterruptedException {<a name="line.866"></a>
-<span class="sourceLineNo">867</span>    // Nothing to do in here if no Master in the mix.<a name="line.867"></a>
-<span class="sourceLineNo">868</span>    if (this.masterless) {<a name="line.868"></a>
-<span class="sourceLineNo">869</span>      return;<a name="line.869"></a>
-<span class="sourceLineNo">870</span>    }<a name="line.870"></a>
-<span class="sourceLineNo">871</span><a name="line.871"></a>
-<span class="sourceLineNo">872</span>    // Create the master address tracker, register with zk, and start it.  Then<a name="line.872"></a>
-<span class="sourceLineNo">873</span>    // block until a master is available.  No point in starting up if no master<a name="line.873"></a>
-<span class="sourceLineNo">874</span>    // running.<a name="line.874"></a>
-<span class="sourceLineNo">875</span>    blockAndCheckIfStopped(this.masterAddressTracker);<a name="line.875"></a>
+<span class="sourceLineNo">778</span><a name="line.778"></a>
+<span class="sourceLineNo">779</span>    coprocessorServiceHandlers.put(serviceName, instance);<a name="line.779"></a>
+<span class="sourceLineNo">780</span>    if (LOG.isDebugEnabled()) {<a name="line.780"></a>
+<span class="sourceLineNo">781</span>      LOG.debug("Registered regionserver coprocessor executorService: executorService=" + serviceName);<a name="line.781"></a>
+<span class="sourceLineNo">782</span>    }<a name="line.782"></a>
+<span class="sourceLineNo">783</span>    return true;<a name="line.783"></a>
+<span class="sourceLineNo">784</span>  }<a name="line.784"></a>
+<span class="sourceLineNo">785</span><a name="line.785"></a>
+<span class="sourceLineNo">786</span>  /**<a name="line.786"></a>
+<span class="sourceLineNo">787</span>   * Create a 'smarter' Connection, one that is capable of by-passing RPC if the request is to the<a name="line.787"></a>
+<span class="sourceLineNo">788</span>   * local server; i.e. a short-circuit Connection. Safe to use going to local or remote server.<a name="line.788"></a>
+<span class="sourceLineNo">789</span>   */<a name="line.789"></a>
+<span class="sourceLineNo">790</span>  private ClusterConnection createClusterConnection() throws IOException {<a name="line.790"></a>
+<span class="sourceLineNo">791</span>    Configuration conf = this.conf;<a name="line.791"></a>
+<span class="sourceLineNo">792</span>    if (conf.get(HConstants.CLIENT_ZOOKEEPER_QUORUM) != null) {<a name="line.792"></a>
+<span class="sourceLineNo">793</span>      // Use server ZK cluster for server-issued connections, so we clone<a name="line.793"></a>
+<span class="sourceLineNo">794</span>      // the conf and unset the client ZK related properties<a name="line.794"></a>
+<span class="sourceLineNo">795</span>      conf = new Configuration(this.conf);<a name="line.795"></a>
+<span class="sourceLineNo">796</span>      conf.unset(HConstants.CLIENT_ZOOKEEPER_QUORUM);<a name="line.796"></a>
+<span class="sourceLineNo">797</span>    }<a name="line.797"></a>
+<span class="sourceLineNo">798</span>    // Create a cluster connection that when appropriate, can short-circuit and go directly to the<a name="line.798"></a>
+<span class="sourceLineNo">799</span>    // local server if the request is to the local server bypassing RPC. Can be used for both local<a name="line.799"></a>
+<span class="sourceLineNo">800</span>    // and remote invocations.<a name="line.800"></a>
+<span class="sourceLineNo">801</span>    ClusterConnection conn = ConnectionUtils.createShortCircuitConnection(conf, null,<a name="line.801"></a>
+<span class="sourceLineNo">802</span>      userProvider.getCurrent(), serverName, rpcServices, rpcServices);<a name="line.802"></a>
+<span class="sourceLineNo">803</span>    // This is used to initialize the batch thread pool inside the connection implementation.<a name="line.803"></a>
+<span class="sourceLineNo">804</span>    // When deploy a fresh cluster, we may first use the cluster connection in InitMetaProcedure,<a name="line.804"></a>
+<span class="sourceLineNo">805</span>    // which will be executed inside the PEWorker, and then the batch thread pool will inherit the<a name="line.805"></a>
+<span class="sourceLineNo">806</span>    // thread group of PEWorker, which will be destroy when shutting down the ProcedureExecutor. It<a name="line.806"></a>
+<span class="sourceLineNo">807</span>    // will cause lots of procedure related UTs to fail, so here let's initialize it first, no harm.<a name="line.807"></a>
+<span class="sourceLineNo">808</span>    conn.getTable(TableName.META_TABLE_NAME).close();<a name="line.808"></a>
+<span class="sourceLineNo">809</span>    return conn;<a name="line.809"></a>
+<span class="sourceLineNo">810</span>  }<a name="line.810"></a>
+<span class="sourceLineNo">811</span><a name="line.811"></a>
+<span class="sourceLineNo">812</span>  /**<a name="line.812"></a>
+<span class="sourceLineNo">813</span>   * Run test on configured codecs to make sure supporting libs are in place.<a name="line.813"></a>
+<span class="sourceLineNo">814</span>   * @param c<a name="line.814"></a>
+<span class="sourceLineNo">815</span>   * @throws IOException<a name="line.815"></a>
+<span class="sourceLineNo">816</span>   */<a name="line.816"></a>
+<span class="sourceLineNo">817</span>  private static void checkCodecs(final Configuration c) throws IOException {<a name="line.817"></a>
+<span class="sourceLineNo">818</span>    // check to see if the codec list is available:<a name="line.818"></a>
+<span class="sourceLineNo">819</span>    String [] codecs = c.getStrings("hbase.regionserver.codecs", (String[])null);<a name="line.819"></a>
+<span class="sourceLineNo">820</span>    if (codecs == null) return;<a name="line.820"></a>
+<span class="sourceLineNo">821</span>    for (String codec : codecs) {<a name="line.821"></a>
+<span class="sourceLineNo">822</span>      if (!CompressionTest.testCompression(codec)) {<a name="line.822"></a>
+<span class="sourceLineNo">823</span>        throw new IOException("Compression codec " + codec +<a name="line.823"></a>
+<span class="sourceLineNo">824</span>          " not supported, aborting RS construction");<a name="line.824"></a>
+<span class="sourceLineNo">825</span>      }<a name="line.825"></a>
+<span class="sourceLineNo">826</span>    }<a name="line.826"></a>
+<span class="sourceLineNo">827</span>  }<a name="line.827"></a>
+<span class="sourceLineNo">828</span><a name="line.828"></a>
+<span class="sourceLineNo">829</span>  public String getClusterId() {<a name="line.829"></a>
+<span class="sourceLineNo">830</span>    return this.clusterId;<a name="line.830"></a>
+<span class="sourceLineNo">831</span>  }<a name="line.831"></a>
+<span class="sourceLineNo">832</span><a name="line.832"></a>
+<span class="sourceLineNo">833</span>  /**<a name="line.833"></a>
+<span class="sourceLineNo">834</span>   * Setup our cluster connection if not already initialized.<a name="line.834"></a>
+<span class="sourceLineNo">835</span>   */<a name="line.835"></a>
+<span class="sourceLineNo">836</span>  protected synchronized void setupClusterConnection() throws IOException {<a name="line.836"></a>
+<span class="sourceLineNo">837</span>    if (clusterConnection == null) {<a name="line.837"></a>
+<span class="sourceLineNo">838</span>      clusterConnection = createClusterConnection();<a name="line.838"></a>
+<span class="sourceLineNo">839</span>      metaTableLocator = new MetaTableLocator();<a name="line.839"></a>
+<span class="sourceLineNo">840</span>    }<a name="line.840"></a>
+<span class="sourceLineNo">841</span>  }<a name="line.841"></a>
+<span class="sourceLineNo">842</span><a name="line.842"></a>
+<span class="sourceLineNo">843</span>  /**<a name="line.843"></a>
+<span class="sourceLineNo">844</span>   * All initialization needed before we go register with Master.&lt;br&gt;<a name="line.844"></a>
+<span class="sourceLineNo">845</span>   * Do bare minimum. Do bulk of initializations AFTER we've connected to the Master.&lt;br&gt;<a name="line.845"></a>
+<span class="sourceLineNo">846</span>   * In here we just put up the RpcServer, setup Connection, and ZooKeeper.<a name="line.846"></a>
+<span class="sourceLineNo">847</span>   */<a name="line.847"></a>
+<span class="sourceLineNo">848</span>  private void preRegistrationInitialization() {<a name="line.848"></a>
+<span class="sourceLineNo">849</span>    try {<a name="line.849"></a>
+<span class="sourceLineNo">850</span>      initializeZooKeeper();<a name="line.850"></a>
+<span class="sourceLineNo">851</span>      setupClusterConnection();<a name="line.851"></a>
+<span class="sourceLineNo">852</span>      // Setup RPC client for master communication<a name="line.852"></a>
+<span class="sourceLineNo">853</span>      this.rpcClient = RpcClientFactory.createClient(conf, clusterId, new InetSocketAddress(<a name="line.853"></a>
+<span class="sourceLineNo">854</span>          this.rpcServices.isa.getAddress(), 0), clusterConnection.getConnectionMetrics());<a name="line.854"></a>
+<span class="sourceLineNo">855</span>    } catch (Throwable t) {<a name="line.855"></a>
+<span class="sourceLineNo">856</span>      // Call stop if error or process will stick around for ever since server<a name="line.856"></a>
+<span class="sourceLineNo">857</span>      // puts up non-daemon threads.<a name="line.857"></a>
+<span class="sourceLineNo">858</span>      this.rpcServices.stop();<a name="line.858"></a>
+<span class="sourceLineNo">859</span>      abort("Initialization of RS failed.  Hence aborting RS.", t);<a name="line.859"></a>
+<span class="sourceLineNo">860</span>    }<a name="line.860"></a>
+<span class="sourceLineNo">861</span>  }<a name="line.861"></a>
+<span class="sourceLineNo">862</span><a name="line.862"></a>
+<span class="sourceLineNo">863</span>  /**<a name="line.863"></a>
+<span class="sourceLineNo">864</span>   * Bring up connection to zk ensemble and then wait until a master for this cluster and then after<a name="line.864"></a>
+<span class="sourceLineNo">865</span>   * that, wait until cluster 'up' flag has been set. This is the order in which master does things.<a name="line.865"></a>
+<span class="sourceLineNo">866</span>   * &lt;p&gt;<a name="line.866"></a>
+<span class="sourceLineNo">867</span>   * Finally open long-living server short-circuit connection.<a name="line.867"></a>
+<span class="sourceLineNo">868</span>   */<a name="line.868"></a>
+<span class="sourceLineNo">869</span>  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="RV_RETURN_VALUE_IGNORED_BAD_PRACTICE",<a name="line.869"></a>
+<span class="sourceLineNo">870</span>    justification="cluster Id znode read would give us correct response")<a name="line.870"></a>
+<span class="sourceLineNo">871</span>  private void initializeZooKeeper() throws IOException, InterruptedException {<a name="line.871"></a>
+<span class="sourceLineNo">872</span>    // Nothing to do in here if no Master in the mix.<a name="line.872"></a>
+<span class="sourceLineNo">873</span>    if (this.masterless) {<a name="line.873"></a>
+<span class="sourceLineNo">874</span>      return;<a name="line.874"></a>
+<span class="sourceLineNo">875</span>    }<a name="line.875"></a>
 <span class="sourceLineNo">876</span><a name="line.876"></a>
-<span class="sourceLineNo">877</span>    // Wait on cluster being up.  Master will set this flag up in zookeeper<a name="line.877"></a>
-<span class="sourceLineNo">878</span>    // when ready.<a name="line.878"></a>
-<span class="sourceLineNo">879</span>    blockAndCheckIfStopped(this.clusterStatusTracker);<a name="line.879"></a>
-<span class="sourceLineNo">880</span><a name="line.880"></a>
-<span class="sourceLineNo">881</span>    // If we are HMaster then the cluster id should have already been set.<a name="line.881"></a>
-<span class="sourceLineNo">882</span>    if (clusterId == null) {<a name="line.882"></a>
-<span class="sourceLineNo">883</span>      // Retrieve clusterId<a name="line.883"></a>
-<span class="sourceLineNo">884</span>      // Since cluster status is now up<a name="line.884"></a>
-<span class="sourceLineNo">885</span>      // ID should have already been set by HMaster<a name="line.885"></a>
-<span class="sourceLineNo">886</span>      try {<a name="line.886"></a>
-<span class="sourceLineNo">887</span>        clusterId = ZKClusterId.readClusterIdZNode(this.zooKeeper);<a name="line.887"></a>
-<span class="sourceLineNo">888</span>        if (clusterId == null) {<a name="line.888"></a>
-<span class="sourceLineNo">889</span>          this.abort("Cluster ID has not been set");<a name="line.889"></a>
-<span class="sourceLineNo">890</span>        }<a name="line.890"></a>
-<span class="sourceLineNo">891</span>        LOG.info("ClusterId : " + clusterId);<a name="line.891"></a>
-<span class="sourceLineNo">892</span>      } catch (KeeperException e) {<a name="line.892"></a>
-<span class="sourceLineNo">893</span>        this.abort("Failed to retrieve Cluster ID", e);<a name="line.893"></a>
-<span class="sourceLineNo">894</span>      }<a name="line.894"></a>
-<span class="sourceLineNo">895</span>    }<a name="line.895"></a>
-<span class="sourceLineNo">896</span><a name="line.896"></a>
-<span class="sourceLineNo">897</span>    waitForMasterActive();<a name="line.897"></a>
-<span class="sourceLineNo">898</span>    if (isStopped() || isAborted()) {<a name="line.898"></a>
-<span class="sourceLineNo">899</span>      return; // No need for further initialization<a name="line.899"></a>
+<span class="sourceLineNo">877</span>    // Create the master address tracker, register with zk, and start it.  Then<a name="line.877"></a>
+<span class="sourceLineNo">878</span>    // block until a master is available.  No point in starting up if no master<a name="line.878"></a>
+<span class="sourceLineNo">879</span>    // running.<a name="line.879"></a>
+<span class="sourceLineNo">880</span>    blockAndCheckIfStopped(this.masterAddressTracker);<a name="line.880"></a>
+<span class="sourceLineNo">881</span><a name="line.881"></a>
+<span class="sourceLineNo">882</span>    // Wait on cluster being up.  Master will set this flag up in zookeeper<a name="line.882"></a>
+<span class="sourceLineNo">883</span>    // when ready.<a name="line.883"></a>
+<span class="sourceLineNo">884</span>    blockAndCheckIfStopped(this.clusterStatusTracker);<a name="line.884"></a>
+<span class="sourceLineNo">885</span><a name="line.885"></a>
+<span class="sourceLineNo">886</span>    // If we are HMaster then the cluster id should have already been set.<a name="line.886"></a>
+<span class="sourceLineNo">887</span>    if (clusterId == null) {<a name="line.887"></a>
+<span class="sourceLineNo">888</span>      // Retrieve clusterId<a name="line.888"></a>
+<span class="sourceLineNo">889</span>      // Since cluster status is now up<a name="line.889"></a>
+<span class="sourceLineNo">890</span>      // ID should have already been set by HMaster<a name="line.890"></a>
+<span class="sourceLineNo">891</span>      try {<a name="line.891"></a>
+<span class="sourceLineNo">892</span>        clusterId = ZKClusterId.readClusterIdZNode(this.zooKeeper);<a name="line.892"></a>
+<span class="sourceLineNo">893</span>        if (clusterId == null) {<a name="line.893"></a>
+<span class="sourceLineNo">894</span>          this.abort("Cluster ID has not been set");<a name="line.894"></a>
+<span class="sourceLineNo">895</span>        }<a name="line.895"></a>
+<span class="sourceLineNo">896</span>        LOG.info("ClusterId : " + clusterId);<a name="line.896"></a>
+<span class="sourceLineNo">897</span>      } catch (KeeperException e) {<a name="line.897"></a>
+<span class="sourceLineNo">898</span>        this.abort("Failed to retrieve Cluster ID", e);<a name="line.898"></a>
+<span class="sourceLineNo">899</span>      }<a name="line.899"></a>
 <span class="sourceLineNo">900</span>    }<a name="line.900"></a>
 <span class="sourceLineNo">901</span><a name="line.901"></a>
-<span class="sourceLineNo">902</span>    // watch for snapshots and other procedures<a name="line.902"></a>
-<span class="sourceLineNo">903</span>    try {<a name="line.903"></a>
-<span class="sourceLineNo">904</span>      rspmHost = new RegionServerProcedureManagerHost();<a name="line.904"></a>
-<span class="sourceLineNo">905</span>      rspmHost.loadProcedures(conf);<a name="line.905"></a>
-<span class="sourceLineNo">906</span>      rspmHost.initialize(this);<a name="line.906"></a>
-<span class="sourceLineNo">907</span>    } catch (KeeperException e) {<a name="line.907"></a>
-<span class="sourceLineNo">908</span>      this.abort("Failed to reach coordination cluster when creating procedure handler.", e);<a name="line.908"></a>
-<span class="sourceLineNo">909</span>    }<a name="line.909"></a>
-<span class="sourceLineNo">910</span>  }<a name="line.910"></a>
-<span class="sourceLineNo">911</span><a name="line.911"></a>
-<span class="sourceLineNo">912</span>  /**<a name="line.912"></a>
-<span class="sourceLineNo">913</span>   * Utilty method to wait indefinitely on a znode availability while checking<a name="line.913"></a>
-<span class="sourceLineNo">914</span>   * if the region server is shut down<a name="line.914"></a>
-<span class="sourceLineNo">915</span>   * @param tracker znode tracker to use<a name="line.915"></a>
-<span class="sourceLineNo">916</span>   * @throws IOException any IO exception, plus if the RS is stopped<a name="line.916"></a>
-<span class="sourceLineNo">917</span>   * @throws InterruptedException<a name="line.917"></a>
-<span class="sourceLineNo">918</span>   */<a name="line.918"></a>
-<span class="sourceLineNo">919</span>  private void blockAndCheckIfStopped(ZKNodeTracker tracker)<a name="line.919"></a>
-<span class="sourceLineNo">920</span>      throws IOException, InterruptedException {<a name="line.920"></a>
-<span class="sourceLineNo">921</span>    while (tracker.blockUntilAvailable(this.msgInterval, false) == null) {<a name="line.921"></a>
-<span class="sourceLineNo">922</span>      if (this.stopped) {<a name="line.922"></a>
-<span class="sourceLineNo">923</span>        throw new IOException("Received the shutdown message while waiting.");<a name="line.923"></a>
-<span class="sourceLineNo">924</span>      }<a name="line.924"></a>
-<span class="sourceLineNo">925</span>    }<a name="line.925"></a>
-<span class="sourceLineNo">926</span>  }<a name="line.926"></a>
-<span class="sourceLineNo">927</span><a name="line.927"></a>
-<span class="sourceLineNo">928</span>  /**<a name="line.928"></a>
-<span class="sourceLineNo">929</span>   * @return True if the cluster is up.<a name="line.929"></a>
-<span class="sourceLineNo">930</span>   */<a name="line.930"></a>
-<span class="sourceLineNo">931</span>  @Override<a name="line.931"></a>
-<span class="sourceLineNo">932</span>  public boolean isClusterUp() {<a name="line.932"></a>
-<span class="sourceLineNo">933</span>    return this.masterless ||<a name="line.933"></a>
-<span class="sourceLineNo">934</span>        (this.clusterStatusTracker != null &amp;&amp; this.clusterStatusTracker.isClusterUp());<a name="line.934"></a>
-<span class="sourceLineNo">935</span>  }<a name="line.935"></a>
-<span class="sourceLineNo">936</span><a name="line.936"></a>
-<span class="sourceLineNo">937</span>  /**<a name="line.937"></a>
-<span class="sourceLineNo">938</span>   * The HRegionServer sticks in this loop until closed.<a name="line.938"></a>
-<span class="sourceLineNo">939</span>   */<a name="line.939"></a>
-<span class="sourceLineNo">940</span>  @Override<a name="line.940"></a>
-<span class="sourceLineNo">941</span>  public void run() {<a name="line.941"></a>
-<span class="sourceLineNo">942</span>    try {<a name="line.942"></a>
-<span class="sourceLineNo">943</span>      // Do pre-registration initializations; zookeeper, lease threads, etc.<a name="line.943"></a>
-<span class="sourceLineNo">944</span>      preRegistrationInitialization();<a name="line.944"></a>
-<span class="sourceLineNo">945</span>    } catch (Throwable e) {<a name="line.945"></a>
-<span class="sourceLineNo">946</span>      abort("Fatal exception during initialization", e);<a name="line.946"></a>
-<span class="sourceLineNo">947</span>    }<a name="line.947"></a>
-<span class="sourceLineNo">948</span><a name="line.948"></a>
-<span class="sourceLineNo">949</span>    try {<a name="line.949"></a>
-<span class="sourceLineNo">950</span>      if (!isStopped() &amp;&amp; !isAborted()) {<a name="line.950"></a>
-<span class="sourceLineNo">951</span>        ShutdownHook.install(conf, fs, this, Thread.currentThread());<a name="line.951"></a>
-<span class="sourceLineNo">952</span>        // Initialize the RegionServerCoprocessorHost now that our ephemeral<a name="line.952"></a>
-<span class="sourceLineNo">953</span>        // node was created, in case any coprocessors want to use ZooKeeper<a name="line.953"></a>
-<span class="sourceLineNo">954</span>        this.rsHost = new RegionServerCoprocessorHost(this, this.conf);<a name="line.954"></a>
-<span class="sourceLineNo">955</span>      }<a name="line.955"></a>
-<span class="sourceLineNo">956</span><a name="line.956"></a>
-<span class="sourceLineNo">957</span>      // Try and register with the Master; tell it we are here.  Break if server is stopped or the<a name="line.957"></a>
-<span class="sourceLineNo">958</span>      // clusterup flag is down or hdfs went wacky. Once registered successfully, go ahead and start<a name="line.958"></a>
-<span class="sourceLineNo">959</span>      // up all Services. Use RetryCounter to get backoff in case Master is struggling to come up.<a name="line.959"></a>
-<span class="sourceLineNo">960</span>      LOG.debug("About to register with Master.");<a name="line.960"></a>
-<span class="sourceLineNo">961</span>      RetryCounterFactory rcf = new RetryCounterFactory(Integer.MAX_VALUE,<a name="line.961"></a>
-<span class="sourceLineNo">962</span>          this.sleeper.getPeriod(), 1000 * 60 * 5);<a name="line.962"></a>
-<span class="sourceLineNo">963</span>      RetryCounter rc = rcf.create();<a name="line.963"></a>
-<span class="sourceLineNo">964</span>      while (keepLooping()) {<a name="line.964"></a>
-<span class="sourceLineNo">965</span>        RegionServerStartupResponse w = reportForDuty();<a name="line.965"></a>
-<span class="sourceLineNo">966</span>        if (w == null) {<a name="line.966"></a>
-<span class="sourceLineNo">967</span>          long sleepTime = rc.getBackoffTimeAndIncrementAttempts();<a name="line.967"></a>
-<span class="sourceLineNo">968</span>          LOG.warn("reportForDuty failed; sleeping {} ms and then retrying.", sleepTime);<a name="line.968"></a>
-<span class="sourceLineNo">969</span>          this.sleeper.sleep(sleepTime);<a name="line.969"></a>
-<span class="sourceLineNo">970</span>        } else {<a name="line.970"></a>
-<span class="sourceLineNo">971</span>          handleReportForDutyResponse(w);<a name="line.971"></a>
-<span class="sourceLineNo">972</span>          break;<a name="line.972"></a>
-<span class="sourceLineNo">973</span>        }<a name="line.973"></a>
-<span class="sourceLineNo">974</span>      }<a name="line.974"></a>
-<span class="sourceLineNo">975</span><a name="line.975"></a>
-<span class="sourceLineNo">976</span>      if (!isStopped() &amp;&amp; isHealthy()) {<a name="line.976"></a>
-<span class="sourceLineNo">977</span>        // start the snapshot handler and other procedure handlers,<a name="line.977"></a>
-<span class="sourceLineNo">978</span>        // since the server is ready to run<a name="line.978"></a>
-<span class="sourceLineNo">979</span>        if (this.rspmHost != null) {<a name="line.979"></a>
-<span class="sourceLineNo">980</span>          this.rspmHost.start();<a name="line.980"></a>
-<span class="sourceLineNo">981</span>        }<a name="line.981"></a>
-<span class="sourceLineNo">982</span>        // Start the Quota Manager<a name="line.982"></a>
-<span class="sourceLineNo">983</span>        if (this.rsQuotaManager != null) {<a name="line.983"></a>
-<span class="sourceLineNo">984</span>          rsQuotaManager.start(getRpcServer().getScheduler());<a name="line.984"></a>
-<span class="sourceLineNo">985</span>        }<a name="line.985"></a>
-<span class="sourceLineNo">986</span>        if (this.rsSpaceQuotaManager != null) {<a name="line.986"></a>
-<span class="sourceLineNo">987</span>          this.rsSpaceQuotaManager.start();<a name="line.987"></a>
-<span class="sourceLineNo">988</span>        }<a name="line.988"></a>
-<span class="sourceLineNo">989</span>      }<a name="line.989"></a>
-<span class="sourceLineNo">990</span><a name="line.990"></a>
-<span class="sourceLineNo">991</span>      // We registered with the Master.  Go into run mode.<a name="line.991"></a>
-<span class="sourceLineNo">992</span>      long lastMsg = System.currentTimeMillis();<a name="line.992"></a>
-<span class="sourceLineNo">993</span>      long oldRequestCount = -1;<a name="line.993"></a>
-<span class="sourceLineNo">994</span>      // The main run loop.<a name="line.994"></a>
-<span class="sourceLineNo">995</span>      while (!isStopped() &amp;&amp; isHealthy()) {<a name="line.995"></a>
-<span class="sourceLineNo">996</span>        if (!isClusterUp()) {<a name="line.996"></a>
-<span class="sourceLineNo">997</span>          if (isOnlineRegionsEmpty()) {<a name="line.997"></a>
-<span class="sourceLineNo">998</span>            stop("Exiting; cluster shutdown set and not carrying any regions");<a name="line.998"></a>
-<span class="sourceLineNo">999</span>          } else if (!this.stopping) {<a name="line.999"></a>
-<span class="sourceLineNo">1000</span>            this.stopping = true;<a name="line.1000"></a>
-<span class="sourceLineNo">1001</span>            LOG.info("Closing user regions");<a name="line.1001"></a>
-<span class="sourceLineNo">1002</span>            closeUserRegions(this.abortRequested);<a name="line.1002"></a>
-<span class="sourceLineNo">1003</span>          } else if (this.stopping) {<a name="line.1003"></a>
-<span class="sourceLineNo">1004</span>            boolean allUserRegionsOffline = areAllUserRegionsOffline();<a name="line.1004"></a>
-<span class="sourceLineNo">1005</span>            if (allUserRegionsOffline) {<a name="line.1005"></a>
-<span class="sourceLineNo">1006</span>              // Set stopped if no more write requests tp meta tables<a name="line.1006"></a>
-<span class="sourceLineNo">1007</span>              // since last time we went around the loop.  Any open<a name="line.1007"></a>
-<span class="sourceLineNo">1008</span>              // meta regions will be closed on our way out.<a name="line.1008"></a>
-<span class="sourceLineNo">1009</span>              if (oldRequestCount == getWriteRequestCount()) {<a name="line.1009"></a>
-<span class="sourceLineNo">1010</span>                stop("Stopped; only catalog regions remaining online");<a name="line.1010"></a>
-<span class="sourceLineNo">1011</span>                break;<a name="line.1011"></a>
-<span class="sourceLineNo">1012</span>              }<a name="line.1012"></a>
-<span class="sourceLineNo">1013</span>              oldRequestCount = getWriteRequestCount();<a name="line.1013"></a>
-<span class="sourceLineNo">1014</span>            } else {<a name="line.1014"></a>
-<span class="sourceLineNo">1015</span>              // Make sure all regions have been closed -- some regions may<a name="line.1015"></a>
-<span class="sourceLineNo">1016</span>              // have not got it because we were splitting at the time of<a name="line.1016"></a>
-<span class="sourceLineNo">1017</span>              // the call to closeUserRegions.<a name="line.1017"></a>
-<span class="sourceLineNo">1018</span>              closeUserRegions(this.abortRequested);<a name="line.1018"></a>
-<span class="sourceLineNo">1019</span>            }<a name="line.1019"></a>
-<span class="sourceLineNo">1020</span>            LOG.debug("Waiting on " + getOnlineRegionsAsPrintableString());<a name="line.1020"></a>
-<span class="sourceLineNo">1021</span>          }<a name="line.1021"></a>
-<span class="sourceLineNo">1022</span>        }<a name="line.1022"></a>
-<span class="sourceLineNo">1023</span>        long now = System.currentTimeMillis();<a name="line.1023"></a>
-<span class="sourceLineNo">1024</span>        if ((now - lastMsg) &gt;= msgInterval) {<a name="line.1024"></a>
-<span class="sourceLineNo">1025</span>          tryRegionServerReport(lastMsg, now);<a name="line.1025"></a>
-<span class="sourceLineNo">1026</span>          lastMsg = System.currentTimeMillis();<a name="line.1026"></a>
+<span class="sourceLineNo">902</span>    waitForMasterActive();<a name="line.902"></a>
+<span class="sourceLineNo">903</span>    if (isStopped() || isAborted()) {<a name="line.903"></a>
+<span class="sourceLineNo">904</span>      return; // No need for further initialization<a name="line.904"></a>
+<span class="sourceLineNo">905</span>    }<a name="line.905"></a>
+<span class="sourceLineNo">906</span><a name="line.906"></a>
+<span class="sourceLineNo">907</span>    // watch for snapshots and other procedures<a name="line.907"></a>
+<span class="sourceLineNo">908</span>    try {<a name="line.908"></a>
+<span class="sourceLineNo">909</span>      rspmHost = new RegionServerProcedureManagerHost();<a name="line.909"></a>
+<span class="sourceLineNo">910</span>      rspmHost.loadProcedures(conf);<a name="line.910"></a>
+<span class="sourceLineNo">911</span>      rspmHost.initialize(this);<a name="line.911"></a>
+<span class="sourceLineNo">912</span>    } catch (KeeperException e) {<a name="line.912"></a>
+<span class="sourceLineNo">913</span>      this.abort("Failed to reach coordination cluster when creating procedure handler.", e);<a name="line.913"></a>
+<span class="sourceLineNo">914</span>    }<a name="line.914"></a>
+<span class="sourceLineNo">915</span>  }<a name="line.915"></a>
+<span class="sourceLineNo">916</span><a name="line.916"></a>
+<span class="sourceLineNo">917</span>  /**<a name="line.917"></a>
+<span class="sourceLineNo">918</span>   * Utilty method to wait indefinitely on a znode availability while checking<a name="line.918"></a>
+<span class="sourceLineNo">919</span>   * if the region server is shut down<a name="line.919"></a>
+<span class="sourceLineNo">920</span>   * @param tracker znode tracker to use<a name="line.920"></a>
+<span class="sourceLineNo">921</span>   * @throws IOException any IO exception, plus if the RS is stopped<a name="line.921"></a>
+<span class="sourceLineNo">922</span>   * @throws InterruptedException<a name="line.922"></a>
+<span class="sourceLineNo">923</span>   */<a name="line.923"></a>
+<span class="sourceLineNo">924</span>  private void blockAndCheckIfStopped(ZKNodeTracker tracker)<a name="line.924"></a>
+<span class="sourceLineNo">925</span>      throws IOException, InterruptedException {<a name="line.925"></a>
+<span class="sourceLineNo">926</span>    while (tracker.blockUntilAvailable(this.msgInterval, false) == null) {<a name="line.926"></a>
+<span class="sourceLineNo">927</span>      if (this.stopped) {<a name="line.927"></a>
+<span class="sourceLineNo">928</span>        throw new IOException("Received the shutdown message while waiting.");<a name="line.928"></a>
+<span class="sourceLineNo">929</span>      }<a name="line.929"></a>
+<span class="sourceLineNo">930</span>    }<a name="line.930"></a>
+<span class="sourceLineNo">931</span>  }<a name="line.931"></a>
+<span class="sourceLineNo">932</span><a name="line.932"></a>
+<span class="sourceLineNo">933</span>  /**<a name="line.933"></a>
+<span class="sourceLineNo">934</span>   * @return True if the cluster is up.<a name="line.934"></a>
+<span class="sourceLineNo">935</span>   */<a name="line.935"></a>
+<span class="sourceLineNo">936</span>  @Override<a name="line.936"></a>
+<span class="sourceLineNo">937</span>  public boolean isClusterUp() {<a name="line.937"></a>
+<span class="sourceLineNo">938</span>    return this.masterless ||<a name="line.938"></a>
+<span class="sourceLineNo">939</span>        (this.clusterStatusTracker != null &amp;&amp; this.clusterStatusTracker.isClusterUp());<a name="line.939"></a>
+<span class="sourceLineNo">940</span>  }<a name="line.940"></a>
+<span class="sourceLineNo">941</span><a name="line.941"></a>
+<span class="sourceLineNo">942</span>  /**<a name="line.942"></a>
+<span class="sourceLineNo">943</span>   * The HRegionServer sticks in this loop until closed.<a name="line.943"></a>
+<span class="sourceLineNo">944</span>   */<a name="line.944"></a>
+<span class="sourceLineNo">945</span>  @Override<a name="line.945"></a>
+<span class="sourceLineNo">946</span>  public void run() {<a name="line.946"></a>
+<span class="sourceLineNo">947</span>    try {<a name="line.947"></a>
+<span class="sourceLineNo">948</span>      // Do pre-registration initializations; zookeeper, lease threads, etc.<a name="line.948"></a>
+<span class="sourceLineNo">949</span>      preRegistrationInitialization();<a name="line.949"></a>
+<span class="sourceLineNo">950</span>    } catch (Throwable e) {<a name="line.950"></a>
+<span class="sourceLineNo">951</span>      abort("Fatal exception during initialization", e);<a name="line.951"></a>
+<span class="sourceLineNo">952</span>    }<a name="line.952"></a>
+<span class="sourceLineNo">953</span><a name="line.953"></a>
+<span class="sourceLineNo">954</span>    try {<a name="line.954"></a>
+<span class="sourceLineNo">955</span>      if (!isStopped() &amp;&amp; !isAborted()) {<a name="line.955"></a>
+<span class="sourceLineNo">956</span>        ShutdownHook.install(conf, fs, this, Thread.currentThread());<a name="line.956"></a>
+<span class="sourceLineNo">957</span>        // Initialize the RegionServerCoprocessorHost now that our ephemeral<a name="line.957"></a>
+<span class="sourceLineNo">958</span>        // node was created, in case any coprocessors want to use ZooKeeper<a name="line.958"></a>
+<span class="sourceLineNo">959</span>        this.rsHost = new RegionServerCoprocessorHost(this, this.conf);<a name="line.959"></a>
+<span class="sourceLineNo">960</span>      }<a name="line.960"></a>
+<span class="sourceLineNo">961</span><a name="line.961"></a>
+<span class="sourceLineNo">962</span>      // Try and register with the Master; tell it we are here.  Break if server is stopped or the<a name="line.962"></a>
+<span class="sourceLineNo">963</span>      // clusterup flag is down or hdfs went wacky. Once registered successfully, go ahead and start<a name="line.963"></a>
+<span class="sourceLineNo">964</span>      // up all Services. Use RetryCounter to get backoff in case Master is struggling to come up.<a name="line.964"></a>
+<span class="sourceLineNo">965</span>      LOG.debug("About to register with Master.");<a name="line.965"></a>
+<span class="sourceLineNo">966</span>      RetryCounterFactory rcf = new RetryCounterFactory(Integer.MAX_VALUE,<a name="line.966"></a>
+<span class="sourceLineNo">967</span>          this.sleeper.getPeriod(), 1000 * 60 * 5);<a name="line.967"></a>
+<span class="sourceLineNo">968</span>      RetryCounter rc = rcf.create();<a name="line.968"></a>
+<span class="sourceLineNo">969</span>      while (keepLooping()) {<a name="line.969"></a>
+<span class="sourceLineNo">970</span>        RegionServerStartupResponse w = reportForDuty();<a name="line.970"></a>
+<span class="sourceLineNo">971</span>        if (w == null) {<a name="line.971"></a>
+<span class="sourceLineNo">972</span>          long sleepTime = rc.getBackoffTimeAndIncrementAttempts();<a name="line.972"></a>
+<span class="sourceLineNo">973</span>          LOG.warn("reportForDuty failed; sleeping {} ms and then retrying.", sleepTime);<a name="line.973"></a>
+<span class="sourceLineNo">974</span>          this.sleeper.sleep(sleepTime);<a name="line.974"></a>
+<span class="sourceLineNo">975</span>        } else {<a name="line.975"></a>
+<span class="sourceLineNo">976</span>          handleReportForDutyResponse(w);<a name="line.976"></a>
+<span class="sourceLineNo">977</span>          break;<a name="line.977"></a>
+<span class="sourceLineNo">978</span>        }<a name="line.978"></a>
+<span class="sourceLineNo">979</span>      }<a name="line.979"></a>
+<span class="sourceLineNo">980</span><a name="line.980"></a>
+<span class="sourceLineNo">981</span>      if (!isStopped() &amp;&amp; isHealthy()) {<a name="line.981"></a>
+<span class="sourceLineNo">982</span>        // start the snapshot handler and other procedure handlers,<a name="line.982"></a>
+<span class="sourceLineNo">983</span>        // since the server is ready to run<a name="line.983"></a>
+<span class="sourceLineNo">984</span>        if (this.rspmHost != null) {<a name="line.984"></a>
+<span class="sourceLineNo">985</span>          this.rspmHost.start();<a name="line.985"></a>
+<span class="sourceLineNo">986</span>        }<a name="line.986"></a>
+<span class="sourceLineNo">987</span>        // Start the Quota Manager<a name="line.987"></a>
+<span class="sourceLineNo">988</span>        if (this.rsQuotaManager != null) {<a name="line.988"></a>
+<span class="sourceLineNo">989</span>          rsQuotaManager.start(getRpcServer().getScheduler());<a name="line.989"></a>
+<span class="sourceLineNo">990</span>        }<a name="line.990"></a>
+<span class="sourceLineNo">991</span>        if (this.rsSpaceQuotaManager != null) {<a name="line.991"></a>
+<span class="sourceLineNo">992</span>          this.rsSpaceQuotaManager.start();<a name="line.992"></a>
+<span class="sourceLineNo">993</span>        }<a name="line.993"></a>
+<span class="sourceLineNo">994</span>      }<a name="line.994"></a>
+<span class="sourceLineNo">995</span><a name="line.995"></a>
+<span class="sourceLineNo">996</span>      // We registered with the Master.  Go into run mode.<a name="line.996"></a>
+<span class="sourceLineNo">997</span>      long lastMsg = System.currentTimeMillis();<a name="line.997"></a>
+<span class="sourceLineNo">998</span>      long oldRequestCount = -1;<a name="line.998"></a>
+<span class="sourceLineNo">999</span>      // The main run loop.<a name="line.999"></a>
+<span class="sourceLineNo">1000</span>      while (!isStopped() &amp;&amp; isHealthy()) {<a name="line.1000"></a>
+<span class="sourceLineNo">1001</span>        if (!isClusterUp()) {<a name="line.1001"></a>
+<span class="sourceLineNo">1002</span>          if (isOnlineRegionsEmpty()) {<a name="line.1002"></a>
+<span class="sourceLineNo">1003</span>            stop("Exiting; cluster shutdown set and not carrying any regions");<a name="line.1003"></a>
+<span class="sourceLineNo">1004</span>          } else if (!this.stopping) {<a name="line.1004"></a>
+<span class="sourceLineNo">1005</span>            this.stopping = true;<a name="line.1005"></a>
+<span class="sourceLineNo">1006</span>            LOG.info("Closing user regions");<a name="line.1006"></a>
+<span class="sourceLineNo">1007</span>            closeUserRegions(this.abortRequested);<a name="line.1007"></a>
+<span class="sourceLineNo">1008</span>          } else if (this.stopping) {<a name="line.1008"></a>
+<span class="sourceLineNo">1009</span>            boolean allUserRegionsOffline = areAllUserRegionsOffline();<a name="line.1009"></a>
+<span class="sourceLineNo">1010</span>            if (allUserRegionsOffline) {<a name="line.1010"></a>
+<span class="sourceLineNo">1011</span>              // Set stopped if no more write requests tp meta tables<a name="line.1011"></a>
+<span class="sourceLineNo">1012</span>              // since last time we went around the loop.  Any open<a name="line.1012"></a>
+<span class="sourceLineNo">1013</span>              // meta regions will be closed on our way out.<a name="line.1013"></a>
+<span class="sourceLineNo">1014</span>              if (oldRequestCount == getWriteRequestCount()) {<a name="line.1014"></a>
+<span class="sourceLineNo">1015</span>                stop("Stopped; only catalog regions remaining online");<a name="line.1015"></a>
+<span class="sourceLineNo">1016</span>                break;<a name="line.1016"></a>
+<span class="sourceLineNo">1017</span>              }<a name="line.1017"></a>
+<span class="sourceLineNo">1018</span>              oldRequestCount = getWriteRequestCount();<a name="line.1018"></a>
+<span class="sourceLineNo">1019</span>            } else {<a name="line.1019"></a>
+<span class="sourceLineNo">1020</span>              // Make sure all regions have been closed -- some regions may<a name="line.1020"></a>
+<span class="sourceLineNo">1021</span>              // have not got it because we were splitting at the time of<a name="line.1021"></a>
+<span class="sourceLineNo">1022</span>              // the call to closeUserRegions.<a name="line.1022"></a>
+<span class="sourceLineNo">1023</span>              closeUserRegions(this.abortRequested);<a name="line.1023"></a>
+<span class="sourceLineNo">1024</span>            }<a name="line.1024"></a>
+<span class="sourceLineNo">1025</span>            LOG.debug("Waiting on " + getOnlineRegionsAsPrintableString());<a name="line.1025"></a>
+<span class="sourceLineNo">1026</span>          }<a name="line.1026"></a>
 <span class="sourceLineNo">1027</span>        }<a name="line.1027"></a>
-<span class="sourceLineNo">1028</span>        if (!isStopped() &amp;&amp; !isAborted()) {<a name="line.1028"></a>
-<span class="sourceLineNo">1029</span>          this.sleeper.sleep();<a name="line.1029"></a>
-<span class="sourceLineNo">1030</span>        }<a name="line.1030"></a>
-<span class="sourceLineNo">1031</span>      } // for<a name="line.1031"></a>
-<span class="sourceLineNo">1032</span>    } catch (Throwable t) {<a name="line.1032"></a>
-<span class="sourceLineNo">1033</span>      if (!rpcServices.checkOOME(t)) {<a name="line.1033"></a>
-<span class="sourceLineNo">1034</span>        String prefix = t instanceof YouAreDeadException? "": "Unhandled: ";<a name="line.1034"></a>
-<span class="sourceLineNo">1035</span>        abort(prefix + t.getMessage(), t);<a name="line.1035"></a>
-<span class="sourceLineNo">1036</span>      }<a name="line.1036"></a>
-<span class="sourceLineNo">1037</span>    }<a name="line.1037"></a>
-<span class="sourceLineNo">1038</span><a name="line.1038"></a>
-<span class="sourceLineNo">1039</span>    if (abortRequested) {<a name="line.1039"></a>
-<span class="sourceLineNo">1040</span>      Timer abortMonitor = new Timer("Abort regionserver monitor", true);<a name="line.1040"></a>
-<span class="sourceLineNo">1041</span>      TimerTask abortTimeoutTask = null;<a name="line.1041"></a>
-<span class="sourceLineNo">1042</span>      try {<a name="line.1042"></a>
-<span class="sourceLineNo">1043</span>        abortTimeoutTask =<a name="line.1043"></a>
-<span class="sourceLineNo">1044</span>            Class.forName(conf.get(ABORT_TIMEOUT_TASK, SystemExitWhenAbortTimeout.class.getName()))<a name="line.1044"></a>
-<span class="sourceLineNo">1045</span>                .asSubclass(TimerTask.class).getDeclaredConstructor().newInstance();<a name="line.1045"></a>
-<span class="sourceLineNo">1046</span>      } catch (Exception e) {<a name="line.1046"></a>
-<span class="sourceLineNo">1047</span>        LOG.warn("Initialize abort timeout task failed", e);<a name="line.1047"></a>
-<span class="sourceLineNo">1048</span>      }<a name="line.1048"></a>
-<span class="sourceLineNo">1049</span>      if (abortTimeoutTask != null) {<a name="line.1049">

<TRUNCATED>
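
Note on the hunk above (part [01/26]): the new lines in initializeZooKeeper() and run() keep HRegionServer's existing startup shape -- block on the master-address and cluster-status znode trackers, then call reportForDuty() in a loop with capped exponential backoff (RetryCounterFactory / getBackoffTimeAndIncrementAttempts) until the Master answers or the server is stopped. Below is a minimal, self-contained Java sketch of that retry shape, an illustration only under stated assumptions: StartupRetrySketch, retryUntilAvailable and the toy reportForDuty stand-in are hypothetical names, not HBase APIs.

    import java.util.concurrent.ThreadLocalRandom;
    import java.util.function.Supplier;

    public final class StartupRetrySketch {

      // Keep calling the reportForDuty stand-in until it returns non-null, sleeping with
      // capped exponential backoff (plus a little jitter) between attempts; give up early
      // if the stop flag is raised. Mirrors the shape of the registration loop above.
      static <T> T retryUntilAvailable(Supplier<T> reportForDuty, Supplier<Boolean> stopped,
          long basePauseMs, long maxPauseMs) throws InterruptedException {
        int attempt = 0;
        while (!stopped.get()) {
          T response = reportForDuty.get();
          if (response != null) {
            return response;                      // registered successfully
          }
          long backoff = Math.min(maxPauseMs, basePauseMs * (1L << Math.min(attempt, 20)));
          backoff += ThreadLocalRandom.current().nextLong(basePauseMs);
          attempt++;
          Thread.sleep(backoff);
        }
        return null;                              // stopped before we could register
      }

      public static void main(String[] args) throws InterruptedException {
        int[] calls = {0};                        // toy "master" answers on the 3rd attempt
        String reply = retryUntilAvailable(
            () -> ++calls[0] < 3 ? null : "welcome",
            () -> false,                          // never stopped in this demo
            100L, 5L * 60L * 1000L);
        System.out.println("Registered after " + calls[0] + " attempts: " + reply);
      }
    }
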

[11/26] hbase-site git commit: Published site at 6f15cecaed2f1f76bfe1880b7c578ed369daa5d5.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/downloads.html
----------------------------------------------------------------------
diff --git a/downloads.html b/downloads.html
index 0ec7666..ff8774c 100644
--- a/downloads.html
+++ b/downloads.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181126" />
+    <meta name="Date-Revision-yyyymmdd" content="20181127" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Apache HBase Downloads</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -461,7 +461,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-11-26</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-11-27</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/export_control.html
----------------------------------------------------------------------
diff --git a/export_control.html b/export_control.html
index b56855c..ee86b6e 100644
--- a/export_control.html
+++ b/export_control.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181126" />
+    <meta name="Date-Revision-yyyymmdd" content="20181127" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; 
       Export Control
@@ -341,7 +341,7 @@ for more details.</p>
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-11-26</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-11-27</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/index.html
----------------------------------------------------------------------
diff --git a/index.html b/index.html
index 82d7a8a..3b13453 100644
--- a/index.html
+++ b/index.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181126" />
+    <meta name="Date-Revision-yyyymmdd" content="20181127" />
     <meta http-equiv="Content-Language" content="en" />
    <title>Apache HBase &#x2013; Apache HBase&#x2122; Home</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -421,7 +421,7 @@ Apache HBase is an open-source, distributed, versioned, non-relational database
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-11-26</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-11-27</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/integration.html
----------------------------------------------------------------------
diff --git a/integration.html b/integration.html
index b0d8ed4..dd4bb76 100644
--- a/integration.html
+++ b/integration.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181126" />
+    <meta name="Date-Revision-yyyymmdd" content="20181127" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; CI Management</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -301,7 +301,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-11-26</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-11-27</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/issue-tracking.html
----------------------------------------------------------------------
diff --git a/issue-tracking.html b/issue-tracking.html
index 0a85140..b94740b 100644
--- a/issue-tracking.html
+++ b/issue-tracking.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181126" />
+    <meta name="Date-Revision-yyyymmdd" content="20181127" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Issue Management</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -298,7 +298,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-11-26</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-11-27</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/license.html
----------------------------------------------------------------------
diff --git a/license.html b/license.html
index 181a16d..42e8a01 100644
--- a/license.html
+++ b/license.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181126" />
+    <meta name="Date-Revision-yyyymmdd" content="20181127" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Licenses</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -501,7 +501,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-11-26</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-11-27</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/mail-lists.html
----------------------------------------------------------------------
diff --git a/mail-lists.html b/mail-lists.html
index f61d63e..14b0bcd 100644
--- a/mail-lists.html
+++ b/mail-lists.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181126" />
+    <meta name="Date-Revision-yyyymmdd" content="20181127" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Mailing Lists</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -351,7 +351,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-11-26</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-11-27</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/metrics.html
----------------------------------------------------------------------
diff --git a/metrics.html b/metrics.html
index 18830b4..d7b15cb 100644
--- a/metrics.html
+++ b/metrics.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181126" />
+    <meta name="Date-Revision-yyyymmdd" content="20181127" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013;  
       Apache HBase (TM) Metrics
@@ -469,7 +469,7 @@ export HBASE_REGIONSERVER_OPTS=&quot;$HBASE_JMX_OPTS -Dcom.sun.management.jmxrem
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-11-26</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-11-27</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/old_news.html
----------------------------------------------------------------------
diff --git a/old_news.html b/old_news.html
index 9b22327..03657be 100644
--- a/old_news.html
+++ b/old_news.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181126" />
+    <meta name="Date-Revision-yyyymmdd" content="20181127" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; 
       Old Apache HBase (TM) News
@@ -450,7 +450,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-11-26</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-11-27</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/plugin-management.html
----------------------------------------------------------------------
diff --git a/plugin-management.html b/plugin-management.html
index da60e8d..a8b2822 100644
--- a/plugin-management.html
+++ b/plugin-management.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181126" />
+    <meta name="Date-Revision-yyyymmdd" content="20181127" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Plugin Management</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -450,7 +450,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-11-26</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-11-27</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/plugins.html
----------------------------------------------------------------------
diff --git a/plugins.html b/plugins.html
index 3274eb6..3e16a09 100644
--- a/plugins.html
+++ b/plugins.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181126" />
+    <meta name="Date-Revision-yyyymmdd" content="20181127" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Plugins</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -381,7 +381,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-11-26</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-11-27</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/poweredbyhbase.html
----------------------------------------------------------------------
diff --git a/poweredbyhbase.html b/poweredbyhbase.html
index 81690a7..eb3fdaa 100644
--- a/poweredbyhbase.html
+++ b/poweredbyhbase.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181126" />
+    <meta name="Date-Revision-yyyymmdd" content="20181127" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Powered By Apache HBase™</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -779,7 +779,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-11-26</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-11-27</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/project-info.html
----------------------------------------------------------------------
diff --git a/project-info.html b/project-info.html
index c4cf804..698243d 100644
--- a/project-info.html
+++ b/project-info.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181126" />
+    <meta name="Date-Revision-yyyymmdd" content="20181127" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Information</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -345,7 +345,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-11-26</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-11-27</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/project-reports.html
----------------------------------------------------------------------
diff --git a/project-reports.html b/project-reports.html
index 230c1d6..1de90d4 100644
--- a/project-reports.html
+++ b/project-reports.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181126" />
+    <meta name="Date-Revision-yyyymmdd" content="20181127" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Generated Reports</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -315,7 +315,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-11-26</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-11-27</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/project-summary.html
----------------------------------------------------------------------
diff --git a/project-summary.html b/project-summary.html
index 68b9fd9..9763ca3 100644
--- a/project-summary.html
+++ b/project-summary.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181126" />
+    <meta name="Date-Revision-yyyymmdd" content="20181127" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Summary</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -341,7 +341,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-11-26</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-11-27</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/pseudo-distributed.html
----------------------------------------------------------------------
diff --git a/pseudo-distributed.html b/pseudo-distributed.html
index 99f1900..feefa12 100644
--- a/pseudo-distributed.html
+++ b/pseudo-distributed.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181126" />
+    <meta name="Date-Revision-yyyymmdd" content="20181127" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013;  
 Running Apache HBase (TM) in pseudo-distributed mode
@@ -318,7 +318,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-11-26</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-11-27</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/replication.html
----------------------------------------------------------------------
diff --git a/replication.html b/replication.html
index 45b1543..8c11e9e 100644
--- a/replication.html
+++ b/replication.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181126" />
+    <meta name="Date-Revision-yyyymmdd" content="20181127" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; 
       Apache HBase (TM) Replication
@@ -313,7 +313,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-11-26</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-11-27</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/resources.html
----------------------------------------------------------------------
diff --git a/resources.html b/resources.html
index c0e9613..23a7142 100644
--- a/resources.html
+++ b/resources.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181126" />
+    <meta name="Date-Revision-yyyymmdd" content="20181127" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Other Apache HBase (TM) Resources</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -341,7 +341,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-11-26</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-11-27</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/source-repository.html
----------------------------------------------------------------------
diff --git a/source-repository.html b/source-repository.html
index 935c1f4..5ae05b3 100644
--- a/source-repository.html
+++ b/source-repository.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181126" />
+    <meta name="Date-Revision-yyyymmdd" content="20181127" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Source Code Management</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -309,7 +309,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-11-26</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-11-27</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/sponsors.html
----------------------------------------------------------------------
diff --git a/sponsors.html b/sponsors.html
index 915da88..2c56bac 100644
--- a/sponsors.html
+++ b/sponsors.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181126" />
+    <meta name="Date-Revision-yyyymmdd" content="20181127" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Apache HBase™ Sponsors</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -343,7 +343,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-11-26</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-11-27</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/supportingprojects.html
----------------------------------------------------------------------
diff --git a/supportingprojects.html b/supportingprojects.html
index a446583..3548bfb 100644
--- a/supportingprojects.html
+++ b/supportingprojects.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181126" />
+    <meta name="Date-Revision-yyyymmdd" content="20181127" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Supporting Projects</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -530,7 +530,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-11-26</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-11-27</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/team-list.html
----------------------------------------------------------------------
diff --git a/team-list.html b/team-list.html
index a857ac9..467dea9 100644
--- a/team-list.html
+++ b/team-list.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181126" />
+    <meta name="Date-Revision-yyyymmdd" content="20181127" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Team</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -776,7 +776,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-11-26</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-11-27</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/testdevapidocs/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.html b/testdevapidocs/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.html
index 0778650..ba9eace 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.html
@@ -526,7 +526,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>doPuts</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.html#line.148">doPuts</a>(org.apache.hadoop.hbase.regionserver.HRegion&nbsp;region)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.html#line.149">doPuts</a>(org.apache.hadoop.hbase.regionserver.HRegion&nbsp;region)
              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -540,7 +540,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>doGets</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.html#line.178">doGets</a>(org.apache.hadoop.hbase.regionserver.Region&nbsp;region)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.html#line.179">doGets</a>(org.apache.hadoop.hbase.regionserver.Region&nbsp;region)
              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.html b/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.html
index 46f63e2..6ea741d 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.html
@@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";
 <li class="blockList">
 <hr>
 <br>
-<pre>public class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.html#line.45">TestBlockCacheReporting</a>
+<pre>public class <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.html#line.43">TestBlockCacheReporting</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 </li>
 </ul>
@@ -234,7 +234,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>CLASS_RULE</h4>
-<pre>public static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/HBaseClassTestRule.html" title="class in org.apache.hadoop.hbase">HBaseClassTestRule</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.html#line.48">CLASS_RULE</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/HBaseClassTestRule.html" title="class in org.apache.hadoop.hbase">HBaseClassTestRule</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.html#line.46">CLASS_RULE</a></pre>
 </li>
 </ul>
 <a name="LOG">
@@ -243,7 +243,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>LOG</h4>
-<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.html#line.51">LOG</a></pre>
+<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.html#line.49">LOG</a></pre>
 </li>
 </ul>
 <a name="conf">
@@ -252,7 +252,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>conf</h4>
-<pre>private&nbsp;org.apache.hadoop.conf.Configuration <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.html#line.52">conf</a></pre>
+<pre>private&nbsp;org.apache.hadoop.conf.Configuration <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.html#line.50">conf</a></pre>
 </li>
 </ul>
 </li>
@@ -269,7 +269,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>TestBlockCacheReporting</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.html#line.45">TestBlockCacheReporting</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.html#line.43">TestBlockCacheReporting</a>()</pre>
 </li>
 </ul>
 </li>
@@ -286,7 +286,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>setUp</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.html#line.55">setUp</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.html#line.53">setUp</a>()
            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -300,7 +300,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>tearDown</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.html#line.61">tearDown</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.html#line.59">tearDown</a>()
               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -314,7 +314,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>addDataAndHits</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.html#line.66">addDataAndHits</a>(org.apache.hadoop.hbase.io.hfile.BlockCache&nbsp;bc,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.html#line.64">addDataAndHits</a>(org.apache.hadoop.hbase.io.hfile.BlockCache&nbsp;bc,
                             int&nbsp;count)</pre>
 </li>
 </ul>
@@ -324,14 +324,10 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testBucketCache</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.html#line.87">testBucketCache</a>()
-                     throws com.fasterxml.jackson.core.JsonGenerationException,
-                            com.fasterxml.jackson.databind.JsonMappingException,
-                            <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.html#line.85">testBucketCache</a>()
+                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
-<dd><code>com.fasterxml.jackson.core.JsonGenerationException</code></dd>
-<dd><code>com.fasterxml.jackson.databind.JsonMappingException</code></dd>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code></dd>
 </dl>
 </li>
@@ -342,14 +338,10 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testLruBlockCache</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.html#line.105">testLruBlockCache</a>()
-                       throws com.fasterxml.jackson.core.JsonGenerationException,
-                              com.fasterxml.jackson.databind.JsonMappingException,
-                              <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.html#line.104">testLruBlockCache</a>()
+                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
-<dd><code>com.fasterxml.jackson.core.JsonGenerationException</code></dd>
-<dd><code>com.fasterxml.jackson.databind.JsonMappingException</code></dd>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code></dd>
 </dl>
 </li>
@@ -370,13 +362,9 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <li class="blockList">
 <h4>logPerFile</h4>
 <pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.html#line.134">logPerFile</a>(org.apache.hadoop.hbase.io.hfile.BlockCacheUtil.CachedBlocksByFile&nbsp;cbsbf)
-                 throws com.fasterxml.jackson.core.JsonGenerationException,
-                        com.fasterxml.jackson.databind.JsonMappingException,
-                        <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
-<dd><code>com.fasterxml.jackson.core.JsonGenerationException</code></dd>
-<dd><code>com.fasterxml.jackson.databind.JsonMappingException</code></dd>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code></dd>
 </dl>
 </li>
@@ -387,14 +375,10 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>logPerBlock</h4>
-<pre>private&nbsp;org.apache.hadoop.hbase.io.hfile.BlockCacheUtil.CachedBlocksByFile&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.html#line.157">logPerBlock</a>(org.apache.hadoop.hbase.io.hfile.BlockCache&nbsp;bc)
-                                                                                throws com.fasterxml.jackson.core.JsonGenerationException,
-                                                                                       com.fasterxml.jackson.databind.JsonMappingException,
-                                                                                       <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
+<pre>private&nbsp;org.apache.hadoop.hbase.io.hfile.BlockCacheUtil.CachedBlocksByFile&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.html#line.156">logPerBlock</a>(org.apache.hadoop.hbase.io.hfile.BlockCache&nbsp;bc)
+                                                                                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
-<dd><code>com.fasterxml.jackson.core.JsonGenerationException</code></dd>
-<dd><code>com.fasterxml.jackson.databind.JsonMappingException</code></dd>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></code></dd>
 </dl>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.html b/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.html
index fa2b173..f531442 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.html
@@ -392,7 +392,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testCacheConfigDefaultLRUBlockCache</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.html#line.276">testCacheConfigDefaultLRUBlockCache</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.html#line.277">testCacheConfigDefaultLRUBlockCache</a>()</pre>
 </li>
 </ul>
 <a name="testOffHeapBucketCacheConfig--">
@@ -401,7 +401,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testOffHeapBucketCacheConfig</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.html#line.288">testOffHeapBucketCacheConfig</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.html#line.290">testOffHeapBucketCacheConfig</a>()</pre>
 <div class="block">Assert that the caches are deployed with CombinedBlockCache and of the appropriate sizes.</div>
 </li>
 </ul>
@@ -411,7 +411,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testFileBucketCacheConfig</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.html#line.294">testFileBucketCacheConfig</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.html#line.296">testFileBucketCacheConfig</a>()
                                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -425,7 +425,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>doBucketCacheConfigTest</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.html#line.307">doBucketCacheConfigTest</a>()</pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.html#line.309">doBucketCacheConfigTest</a>()</pre>
 </li>
 </ul>
 <a name="testBucketCacheConfigL1L2Setup--">
@@ -434,7 +434,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testBucketCacheConfigL1L2Setup</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.html#line.330">testBucketCacheConfigL1L2Setup</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.html#line.333">testBucketCacheConfigL1L2Setup</a>()</pre>
 <div class="block">Assert that when BUCKET_CACHE_COMBINED_KEY is false, the non-default, that we deploy
  LruBlockCache as L1 with a BucketCache for L2.</div>
 </li>
@@ -445,7 +445,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>testL2CacheWithInvalidBucketSize</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.html#line.378">testL2CacheWithInvalidBucketSize</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.html#line.382">testL2CacheWithInvalidBucketSize</a>()</pre>
 </li>
 </ul>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.html b/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.html
index df5afed..e4114a5 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.html
@@ -590,7 +590,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getParameters</h4>
-<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>[]&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.html#line.182">getParameters</a>()
+<pre>public static&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>[]&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.html#line.183">getParameters</a>()
                                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -604,7 +604,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>clearBlockCache</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.html#line.196">clearBlockCache</a>(org.apache.hadoop.hbase.io.hfile.BlockCache&nbsp;blockCache)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.html#line.197">clearBlockCache</a>(org.apache.hadoop.hbase.io.hfile.BlockCache&nbsp;blockCache)
                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -618,7 +618,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>setUp</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.html#line.222">setUp</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.html#line.223">setUp</a>()
            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.html b/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.html
index 26e3c21..02cac9e 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.html
@@ -504,7 +504,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testCacheBlocks</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.html#line.112">testCacheBlocks</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.html#line.113">testCacheBlocks</a>()
                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -518,7 +518,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>writeTestData</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.html#line.137">writeTestData</a>(org.apache.hadoop.hbase.regionserver.HRegion&nbsp;region)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.html#line.138">writeTestData</a>(org.apache.hadoop.hbase.regionserver.HRegion&nbsp;region)
                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.html b/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.html
index 9d82303..9a2c2bb 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.html
@@ -801,7 +801,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testHFileWriterAndReader</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.html#line.597">testHFileWriterAndReader</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.html#line.589">testHFileWriterAndReader</a>()
                               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Testing block index through the HFile writer/reader APIs. Allows to test
  setting index block size through configuration, intermediate-level index
@@ -818,7 +818,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>checkSeekTo</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.html#line.723">checkSeekTo</a>(byte[][]&nbsp;keys,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.html#line.716">checkSeekTo</a>(byte[][]&nbsp;keys,
                          org.apache.hadoop.hbase.io.hfile.HFileScanner&nbsp;scanner,
                          int&nbsp;i)
                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -834,7 +834,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>assertArrayEqualsBuffer</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.html#line.729">assertArrayEqualsBuffer</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;msgPrefix,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.html#line.722">assertArrayEqualsBuffer</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;msgPrefix,
                                      byte[]&nbsp;arr,
                                      <a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;buf)</pre>
 </li>
@@ -845,7 +845,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>checkKeyValue</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.html#line.737">checkKeyValue</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;msgPrefix,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.html#line.730">checkKeyValue</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;msgPrefix,
                            byte[]&nbsp;expectedKey,
                            byte[]&nbsp;expectedValue,
                            <a href="https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true" title="class or interface in java.nio">ByteBuffer</a>&nbsp;keyRead,
@@ -859,7 +859,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testIntermediateLevelIndicesWithLargeKeys</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.html#line.748">testIntermediateLevelIndicesWithLargeKeys</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.html#line.741">testIntermediateLevelIndicesWithLargeKeys</a>()
                                                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -873,7 +873,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testIntermediateLevelIndicesWithLargeKeysWithMinNumEntries</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.html#line.753">testIntermediateLevelIndicesWithLargeKeysWithMinNumEntries</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.html#line.746">testIntermediateLevelIndicesWithLargeKeysWithMinNumEntries</a>()
                                                                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -887,7 +887,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>testIntermediateLevelIndicesWithLargeKeys</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.html#line.758">testIntermediateLevelIndicesWithLargeKeys</a>(int&nbsp;minNumEntries)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.html#line.751">testIntermediateLevelIndicesWithLargeKeys</a>(int&nbsp;minNumEntries)
                                                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestPrefetch.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestPrefetch.html b/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestPrefetch.html
index 3e7abb8..5a7a090 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestPrefetch.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestPrefetch.html
@@ -388,7 +388,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testPrefetchSetInHCDWorks</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestPrefetch.html#line.71">testPrefetchSetInHCDWorks</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestPrefetch.html#line.72">testPrefetchSetInHCDWorks</a>()</pre>
 </li>
 </ul>
 <a name="testPrefetch--">
@@ -397,7 +397,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testPrefetch</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestPrefetch.html#line.81">testPrefetch</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestPrefetch.html#line.82">testPrefetch</a>()
                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -411,7 +411,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testPrefetchRace</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestPrefetch.html#line.87">testPrefetchRace</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestPrefetch.html#line.88">testPrefetchRace</a>()
                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -425,7 +425,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>readStoreFileLikeScanner</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestPrefetch.html#line.98">readStoreFileLikeScanner</a>(org.apache.hadoop.fs.Path&nbsp;storeFilePath)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestPrefetch.html#line.99">readStoreFileLikeScanner</a>(org.apache.hadoop.fs.Path&nbsp;storeFilePath)
                                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <div class="block">Read a storefile in the same manner as a scanner -- using non-positional reads and
  without waiting for prefetch to complete.</div>
@@ -441,7 +441,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>readStoreFile</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestPrefetch.html#line.111">readStoreFile</a>(org.apache.hadoop.fs.Path&nbsp;storeFilePath)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestPrefetch.html#line.112">readStoreFile</a>(org.apache.hadoop.fs.Path&nbsp;storeFilePath)
                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -455,7 +455,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>writeStoreFile</h4>
-<pre>private&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestPrefetch.html#line.136">writeStoreFile</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;fname)
+<pre>private&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestPrefetch.html#line.137">writeStoreFile</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;fname)
                                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -469,7 +469,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>generateKeyType</h4>
-<pre>public static&nbsp;org.apache.hadoop.hbase.KeyValue.Type&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestPrefetch.html#line.166">generateKeyType</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Random.html?is-external=true" title="class or interface in java.util">Random</a>&nbsp;rand)</pre>
+<pre>public static&nbsp;org.apache.hadoop.hbase.KeyValue.Type&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestPrefetch.html#line.167">generateKeyType</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Random.html?is-external=true" title="class or interface in java.util">Random</a>&nbsp;rand)</pre>
 </li>
 </ul>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.html b/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.html
index e5f8885..ea501a7 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.html
@@ -421,7 +421,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>tearDown</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.html#line.94">tearDown</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.html#line.95">tearDown</a>()
               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -435,7 +435,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getName</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.html#line.101">getName</a>()</pre>
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.html#line.102">getName</a>()</pre>
 </li>
 </ul>
 <a name="testBasicScanWithLRUCache--">
@@ -444,7 +444,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testBasicScanWithLRUCache</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.html#line.106">testBasicScanWithLRUCache</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.html#line.107">testBasicScanWithLRUCache</a>()
                                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -458,7 +458,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testBasicScanWithOffheapBucketCache</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.html#line.144">testBasicScanWithOffheapBucketCache</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.html#line.145">testBasicScanWithOffheapBucketCache</a>()
                                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -472,7 +472,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testBasicScanWithOffheapBucketCacheWithMBB</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.html#line.185">testBasicScanWithOffheapBucketCacheWithMBB</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.html#line.186">testBasicScanWithOffheapBucketCacheWithMBB</a>()
                                                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -486,7 +486,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>insertData</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.hbase.Cell&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.html#line.236">insertData</a>(byte[]&nbsp;row1,
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.hbase.Cell&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.html#line.237">insertData</a>(byte[]&nbsp;row1,
                                                       byte[]&nbsp;qf1,
                                                       byte[]&nbsp;qf2,
                                                       byte[]&nbsp;fam1,
@@ -507,7 +507,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>performScan</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.hbase.Cell&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.html#line.291">performScan</a>(byte[]&nbsp;row1,
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.hbase.Cell&gt;&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.html#line.292">performScan</a>(byte[]&nbsp;row1,
                                                        byte[]&nbsp;fam1)
                                                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -522,7 +522,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>initHRegion</h4>
-<pre>private static&nbsp;org.apache.hadoop.hbase.regionserver.HRegion&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.html#line.303">initHRegion</a>(org.apache.hadoop.hbase.TableName&nbsp;tableName,
+<pre>private static&nbsp;org.apache.hadoop.hbase.regionserver.HRegion&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.html#line.304">initHRegion</a>(org.apache.hadoop.hbase.TableName&nbsp;tableName,
                                                                         <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;callingMethod,
                                                                         org.apache.hadoop.conf.Configuration&nbsp;conf,
                                                                         <a href="../../../../../../org/apache/hadoop/hbase/HBaseTestingUtility.html" title="class in org.apache.hadoop.hbase">HBaseTestingUtility</a>&nbsp;test_util,
@@ -540,7 +540,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>initHRegion</h4>
-<pre>private static&nbsp;org.apache.hadoop.hbase.regionserver.HRegion&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.html#line.308">initHRegion</a>(org.apache.hadoop.hbase.TableName&nbsp;tableName,
+<pre>private static&nbsp;org.apache.hadoop.hbase.regionserver.HRegion&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.html#line.309">initHRegion</a>(org.apache.hadoop.hbase.TableName&nbsp;tableName,
                                                                         byte[]&nbsp;startKey,
                                                                         byte[]&nbsp;stopKey,
                                                                         <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;callingMethod,
@@ -561,7 +561,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>initHRegion</h4>
-<pre>private static&nbsp;org.apache.hadoop.hbase.regionserver.HRegion&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.html#line.331">initHRegion</a>(org.apache.hadoop.hbase.TableName&nbsp;tableName,
+<pre>private static&nbsp;org.apache.hadoop.hbase.regionserver.HRegion&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.html#line.332">initHRegion</a>(org.apache.hadoop.hbase.TableName&nbsp;tableName,
                                                                         byte[]&nbsp;startKey,
                                                                         byte[]&nbsp;stopKey,
                                                                         <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;callingMethod,

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html
index df86cea..804c37f 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html
@@ -458,7 +458,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>tearDown</h4>
-<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html#line.82">tearDown</a>()
+<pre>public static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html#line.83">tearDown</a>()
                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -472,7 +472,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>initHRegion</h4>
-<pre>private&nbsp;org.apache.hadoop.hbase.regionserver.HRegion&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html#line.95">initHRegion</a>(byte[]&nbsp;tableName,
+<pre>private&nbsp;org.apache.hadoop.hbase.regionserver.HRegion&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html#line.96">initHRegion</a>(byte[]&nbsp;tableName,
                                                                  <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;callingMethod,
                                                                  org.apache.hadoop.conf.Configuration&nbsp;conf,
                                                                  <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;family)
@@ -497,7 +497,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>putData</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html#line.114">putData</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;family,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html#line.115">putData</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;family,
                      <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;row,
                      <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;col,
                      long&nbsp;version)
@@ -514,7 +514,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>genValue</h4>
-<pre>private static&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html#line.123">genValue</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;row,
+<pre>private static&nbsp;byte[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html#line.124">genValue</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;row,
                                <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;col,
                                long&nbsp;version)</pre>
 </li>
@@ -525,7 +525,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>putData</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html#line.127">putData</a>(byte[]&nbsp;cf,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html#line.128">putData</a>(byte[]&nbsp;cf,
                      <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;row,
                      <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;col,
                      long&nbsp;versionStart,
@@ -543,7 +543,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getData</h4>
-<pre>private&nbsp;org.apache.hadoop.hbase.Cell[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html#line.139">getData</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;family,
+<pre>private&nbsp;org.apache.hadoop.hbase.Cell[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html#line.140">getData</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;family,
                                                <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;row,
                                                <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;columns,
                                                int&nbsp;expBlocks)
@@ -560,7 +560,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getData</h4>
-<pre>private&nbsp;org.apache.hadoop.hbase.Cell[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html#line.144">getData</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;family,
+<pre>private&nbsp;org.apache.hadoop.hbase.Cell[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html#line.145">getData</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;family,
                                                <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;row,
                                                <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&gt;&nbsp;columns,
                                                int&nbsp;expBlocksRowCol,
@@ -579,7 +579,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getData</h4>
-<pre>private&nbsp;org.apache.hadoop.hbase.Cell[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html#line.172">getData</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;family,
+<pre>private&nbsp;org.apache.hadoop.hbase.Cell[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html#line.173">getData</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;family,
                                                <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;row,
                                                <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;column,
                                                int&nbsp;expBlocks)
@@ -596,7 +596,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getData</h4>
-<pre>private&nbsp;org.apache.hadoop.hbase.Cell[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html#line.178">getData</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;family,
+<pre>private&nbsp;org.apache.hadoop.hbase.Cell[]&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html#line.179">getData</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;family,
                                                <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;row,
                                                <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;column,
                                                int&nbsp;expBlocksRowCol,
@@ -615,7 +615,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>deleteFamily</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html#line.185">deleteFamily</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;family,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html#line.186">deleteFamily</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;family,
                           <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;row,
                           long&nbsp;version)
                    throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -631,7 +631,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>verifyData</h4>
-<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html#line.194">verifyData</a>(org.apache.hadoop.hbase.Cell&nbsp;kv,
+<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html#line.195">verifyData</a>(org.apache.hadoop.hbase.Cell&nbsp;kv,
                                <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;expectedRow,
                                <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;expectedCol,
                                long&nbsp;expectedVersion)</pre>
@@ -643,7 +643,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getBlkAccessCount</h4>
-<pre>private static&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html#line.202">getBlkAccessCount</a>(byte[]&nbsp;cf)</pre>
+<pre>private static&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html#line.203">getBlkAccessCount</a>(byte[]&nbsp;cf)</pre>
 </li>
 </ul>
 <a name="getBlkCount--">
@@ -652,7 +652,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getBlkCount</h4>
-<pre>private static&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html#line.206">getBlkCount</a>()</pre>
+<pre>private static&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html#line.207">getBlkCount</a>()</pre>
 </li>
 </ul>
 <a name="testBlocksRead--">
@@ -661,7 +661,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testBlocksRead</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html#line.216">testBlocksRead</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html#line.217">testBlocksRead</a>()
                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <div class="block">Test # of blocks read for some simple seek cases.</div>
 <dl>
@@ -676,7 +676,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testLazySeekBlocksRead</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html#line.272">testLazySeekBlocksRead</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html#line.273">testLazySeekBlocksRead</a>()
                             throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <div class="block">Test # of blocks read (targeted at some of the cases Lazy Seek optimizes).</div>
 <dl>
@@ -691,7 +691,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testBlocksStoredWhenCachingDisabled</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html#line.379">testBlocksStoredWhenCachingDisabled</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html#line.380">testBlocksStoredWhenCachingDisabled</a>()
                                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <div class="block">Test # of blocks read to ensure disabling cache-fill on Scan works.</div>
 <dl>
@@ -706,7 +706,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>testLazySeekBlocksReadWithDelete</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html#line.423">testLazySeekBlocksReadWithDelete</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestBlocksRead.html#line.424">testLazySeekBlocksReadWithDelete</a>()
                                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>

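The TestBlocksRead helpers listed above (putData, getData, verifyData, getBlkAccessCount, getBlkCount) all serve one pattern: snapshot a cumulative block-read counter, perform a read, and assert on the delta. A minimal, self-contained sketch of that pattern follows; BLOCK_READS, doRead and getBlkCount here are stand-ins for the real HFile metrics and HRegion read path, not HBase APIs.

import java.util.concurrent.atomic.LongAdder;

public class BlockReadDeltaSketch {
  // Stand-in for the region server's cumulative data-block read metric.
  private static final LongAdder BLOCK_READS = new LongAdder();

  // Stand-in for a read that happens to touch the given number of blocks.
  private static void doRead(int blocksTouched) {
    BLOCK_READS.add(blocksTouched);
  }

  // Mirrors the shape of TestBlocksRead.getBlkCount(): read the cumulative counter.
  private static long getBlkCount() {
    return BLOCK_READS.sum();
  }

  public static void main(String[] args) {
    long before = getBlkCount();
    doRead(2);                               // e.g. one index block plus one data block
    long delta = getBlkCount() - before;
    if (delta != 2) {
      throw new AssertionError("expected 2 blocks read, saw " + delta);
    }
    System.out.println("blocks read by this access: " + delta);
  }
}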

[18/26] hbase-site git commit: Published site at 6f15cecaed2f1f76bfe1880b7c578ed369daa5d5.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/src-html/org/apache/hadoop/hbase/mob/MobCacheConfig.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/mob/MobCacheConfig.html b/devapidocs/src-html/org/apache/hadoop/hbase/mob/MobCacheConfig.html
index c51039f..2a53ba8 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/mob/MobCacheConfig.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/mob/MobCacheConfig.html
@@ -50,31 +50,26 @@
 <span class="sourceLineNo">042</span>    instantiateMobFileCache(conf);<a name="line.42"></a>
 <span class="sourceLineNo">043</span>  }<a name="line.43"></a>
 <span class="sourceLineNo">044</span><a name="line.44"></a>
-<span class="sourceLineNo">045</span>  public MobCacheConfig(Configuration conf, boolean needBlockCache) {<a name="line.45"></a>
-<span class="sourceLineNo">046</span>    super(conf, needBlockCache);<a name="line.46"></a>
-<span class="sourceLineNo">047</span>    instantiateMobFileCache(conf);<a name="line.47"></a>
-<span class="sourceLineNo">048</span>  }<a name="line.48"></a>
-<span class="sourceLineNo">049</span><a name="line.49"></a>
-<span class="sourceLineNo">050</span>  /**<a name="line.50"></a>
-<span class="sourceLineNo">051</span>   * Instantiates the MobFileCache.<a name="line.51"></a>
-<span class="sourceLineNo">052</span>   * @param conf The current configuration.<a name="line.52"></a>
-<span class="sourceLineNo">053</span>   * @return The current instance of MobFileCache.<a name="line.53"></a>
-<span class="sourceLineNo">054</span>   */<a name="line.54"></a>
-<span class="sourceLineNo">055</span>  public static synchronized MobFileCache instantiateMobFileCache(Configuration conf) {<a name="line.55"></a>
-<span class="sourceLineNo">056</span>    if (mobFileCache == null) {<a name="line.56"></a>
-<span class="sourceLineNo">057</span>      mobFileCache = new MobFileCache(conf);<a name="line.57"></a>
-<span class="sourceLineNo">058</span>    }<a name="line.58"></a>
-<span class="sourceLineNo">059</span>    return mobFileCache;<a name="line.59"></a>
-<span class="sourceLineNo">060</span>  }<a name="line.60"></a>
-<span class="sourceLineNo">061</span><a name="line.61"></a>
-<span class="sourceLineNo">062</span>  /**<a name="line.62"></a>
-<span class="sourceLineNo">063</span>   * Gets the MobFileCache.<a name="line.63"></a>
-<span class="sourceLineNo">064</span>   * @return The MobFileCache.<a name="line.64"></a>
-<span class="sourceLineNo">065</span>   */<a name="line.65"></a>
-<span class="sourceLineNo">066</span>  public MobFileCache getMobFileCache() {<a name="line.66"></a>
-<span class="sourceLineNo">067</span>    return mobFileCache;<a name="line.67"></a>
-<span class="sourceLineNo">068</span>  }<a name="line.68"></a>
-<span class="sourceLineNo">069</span>}<a name="line.69"></a>
+<span class="sourceLineNo">045</span>  /**<a name="line.45"></a>
+<span class="sourceLineNo">046</span>   * Instantiates the MobFileCache.<a name="line.46"></a>
+<span class="sourceLineNo">047</span>   * @param conf The current configuration.<a name="line.47"></a>
+<span class="sourceLineNo">048</span>   * @return The current instance of MobFileCache.<a name="line.48"></a>
+<span class="sourceLineNo">049</span>   */<a name="line.49"></a>
+<span class="sourceLineNo">050</span>  public static synchronized MobFileCache instantiateMobFileCache(Configuration conf) {<a name="line.50"></a>
+<span class="sourceLineNo">051</span>    if (mobFileCache == null) {<a name="line.51"></a>
+<span class="sourceLineNo">052</span>      mobFileCache = new MobFileCache(conf);<a name="line.52"></a>
+<span class="sourceLineNo">053</span>    }<a name="line.53"></a>
+<span class="sourceLineNo">054</span>    return mobFileCache;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>  }<a name="line.55"></a>
+<span class="sourceLineNo">056</span><a name="line.56"></a>
+<span class="sourceLineNo">057</span>  /**<a name="line.57"></a>
+<span class="sourceLineNo">058</span>   * Gets the MobFileCache.<a name="line.58"></a>
+<span class="sourceLineNo">059</span>   * @return The MobFileCache.<a name="line.59"></a>
+<span class="sourceLineNo">060</span>   */<a name="line.60"></a>
+<span class="sourceLineNo">061</span>  public MobFileCache getMobFileCache() {<a name="line.61"></a>
+<span class="sourceLineNo">062</span>    return mobFileCache;<a name="line.62"></a>
+<span class="sourceLineNo">063</span>  }<a name="line.63"></a>
+<span class="sourceLineNo">064</span>}<a name="line.64"></a>
 
 
 

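The MobCacheConfig change above keeps the lazily created, process-wide MobFileCache: instantiateMobFileCache(Configuration) is static and synchronized, creates the cache only on the first call, and returns the same instance afterwards. A minimal sketch of that pattern follows, using placeholder types (FileCacheSketch, a plain int instead of Configuration); note the consequence that the first caller's configuration wins.

public class FileCacheSketch {
  private static FileCacheSketch cache;     // shared by every caller in the process

  private final int capacity;

  private FileCacheSketch(int capacity) {
    this.capacity = capacity;
  }

  // Mirrors instantiateMobFileCache: create once, then always return the same instance.
  public static synchronized FileCacheSketch instantiate(int capacityFromConf) {
    if (cache == null) {
      cache = new FileCacheSketch(capacityFromConf);
    }
    return cache;
  }

  public int getCapacity() {
    return capacity;
  }

  public static void main(String[] args) {
    FileCacheSketch a = instantiate(100);
    FileCacheSketch b = instantiate(500);   // ignored: the cache already exists
    System.out.println(a == b);             // true
    System.out.println(b.getCapacity());    // 100 -- the first configuration won
  }
}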
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.WriterFactory.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.WriterFactory.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.WriterFactory.html
index 5feaaf2..8b02862 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.WriterFactory.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.WriterFactory.html
@@ -29,111 +29,112 @@
 <span class="sourceLineNo">021</span>import java.util.ArrayList;<a name="line.21"></a>
 <span class="sourceLineNo">022</span>import java.util.Collection;<a name="line.22"></a>
 <span class="sourceLineNo">023</span>import java.util.List;<a name="line.23"></a>
-<span class="sourceLineNo">024</span><a name="line.24"></a>
-<span class="sourceLineNo">025</span>import org.apache.hadoop.fs.Path;<a name="line.25"></a>
-<span class="sourceLineNo">026</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import org.slf4j.Logger;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import org.slf4j.LoggerFactory;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.regionserver.CellSink;<a name="line.29"></a>
-<span class="sourceLineNo">030</span><a name="line.30"></a>
-<span class="sourceLineNo">031</span>/**<a name="line.31"></a>
-<span class="sourceLineNo">032</span> * Base class for cell sink that separates the provided cells into multiple files.<a name="line.32"></a>
-<span class="sourceLineNo">033</span> */<a name="line.33"></a>
-<span class="sourceLineNo">034</span>@InterfaceAudience.Private<a name="line.34"></a>
-<span class="sourceLineNo">035</span>public abstract class AbstractMultiFileWriter implements CellSink, ShipperListener {<a name="line.35"></a>
+<span class="sourceLineNo">024</span>import org.apache.hadoop.fs.Path;<a name="line.24"></a>
+<span class="sourceLineNo">025</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.25"></a>
+<span class="sourceLineNo">026</span>import org.slf4j.Logger;<a name="line.26"></a>
+<span class="sourceLineNo">027</span>import org.slf4j.LoggerFactory;<a name="line.27"></a>
+<span class="sourceLineNo">028</span><a name="line.28"></a>
+<span class="sourceLineNo">029</span>/**<a name="line.29"></a>
+<span class="sourceLineNo">030</span> * Base class for cell sink that separates the provided cells into multiple files.<a name="line.30"></a>
+<span class="sourceLineNo">031</span> */<a name="line.31"></a>
+<span class="sourceLineNo">032</span>@InterfaceAudience.Private<a name="line.32"></a>
+<span class="sourceLineNo">033</span>public abstract class AbstractMultiFileWriter implements CellSink, ShipperListener {<a name="line.33"></a>
+<span class="sourceLineNo">034</span><a name="line.34"></a>
+<span class="sourceLineNo">035</span>  private static final Logger LOG = LoggerFactory.getLogger(AbstractMultiFileWriter.class);<a name="line.35"></a>
 <span class="sourceLineNo">036</span><a name="line.36"></a>
-<span class="sourceLineNo">037</span>  private static final Logger LOG = LoggerFactory.getLogger(AbstractMultiFileWriter.class);<a name="line.37"></a>
-<span class="sourceLineNo">038</span><a name="line.38"></a>
-<span class="sourceLineNo">039</span>  /** Factory that is used to produce single StoreFile.Writer-s */<a name="line.39"></a>
-<span class="sourceLineNo">040</span>  protected WriterFactory writerFactory;<a name="line.40"></a>
-<span class="sourceLineNo">041</span><a name="line.41"></a>
-<span class="sourceLineNo">042</span>  /** Source scanner that is tracking KV count; may be null if source is not StoreScanner */<a name="line.42"></a>
-<span class="sourceLineNo">043</span>  protected StoreScanner sourceScanner;<a name="line.43"></a>
-<span class="sourceLineNo">044</span><a name="line.44"></a>
-<span class="sourceLineNo">045</span>  public interface WriterFactory {<a name="line.45"></a>
-<span class="sourceLineNo">046</span>    public StoreFileWriter createWriter() throws IOException;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>  }<a name="line.47"></a>
-<span class="sourceLineNo">048</span><a name="line.48"></a>
-<span class="sourceLineNo">049</span>  /**<a name="line.49"></a>
-<span class="sourceLineNo">050</span>   * Initializes multi-writer before usage.<a name="line.50"></a>
-<span class="sourceLineNo">051</span>   * @param sourceScanner Optional store scanner to obtain the information about read progress.<a name="line.51"></a>
-<span class="sourceLineNo">052</span>   * @param factory Factory used to produce individual file writers.<a name="line.52"></a>
-<span class="sourceLineNo">053</span>   */<a name="line.53"></a>
-<span class="sourceLineNo">054</span>  public void init(StoreScanner sourceScanner, WriterFactory factory) {<a name="line.54"></a>
-<span class="sourceLineNo">055</span>    this.writerFactory = factory;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>    this.sourceScanner = sourceScanner;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>  }<a name="line.57"></a>
-<span class="sourceLineNo">058</span><a name="line.58"></a>
-<span class="sourceLineNo">059</span>  /**<a name="line.59"></a>
-<span class="sourceLineNo">060</span>   * Commit all writers.<a name="line.60"></a>
-<span class="sourceLineNo">061</span>   * &lt;p&gt;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>   * Notice that here we use the same &lt;code&gt;maxSeqId&lt;/code&gt; for all output files since we haven't<a name="line.62"></a>
-<span class="sourceLineNo">063</span>   * find an easy to find enough sequence ids for different output files in some corner cases. See<a name="line.63"></a>
-<span class="sourceLineNo">064</span>   * comments in HBASE-15400 for more details.<a name="line.64"></a>
-<span class="sourceLineNo">065</span>   */<a name="line.65"></a>
-<span class="sourceLineNo">066</span>  public List&lt;Path&gt; commitWriters(long maxSeqId, boolean majorCompaction) throws IOException {<a name="line.66"></a>
-<span class="sourceLineNo">067</span>    preCommitWriters();<a name="line.67"></a>
-<span class="sourceLineNo">068</span>    Collection&lt;StoreFileWriter&gt; writers = this.writers();<a name="line.68"></a>
-<span class="sourceLineNo">069</span>    if (LOG.isDebugEnabled()) {<a name="line.69"></a>
-<span class="sourceLineNo">070</span>      LOG.debug("Commit " + writers.size() + " writers, maxSeqId=" + maxSeqId<a name="line.70"></a>
-<span class="sourceLineNo">071</span>          + ", majorCompaction=" + majorCompaction);<a name="line.71"></a>
-<span class="sourceLineNo">072</span>    }<a name="line.72"></a>
-<span class="sourceLineNo">073</span>    List&lt;Path&gt; paths = new ArrayList&lt;&gt;();<a name="line.73"></a>
-<span class="sourceLineNo">074</span>    for (StoreFileWriter writer : writers) {<a name="line.74"></a>
-<span class="sourceLineNo">075</span>      if (writer == null) {<a name="line.75"></a>
-<span class="sourceLineNo">076</span>        continue;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>      }<a name="line.77"></a>
-<span class="sourceLineNo">078</span>      writer.appendMetadata(maxSeqId, majorCompaction);<a name="line.78"></a>
-<span class="sourceLineNo">079</span>      preCloseWriter(writer);<a name="line.79"></a>
-<span class="sourceLineNo">080</span>      paths.add(writer.getPath());<a name="line.80"></a>
-<span class="sourceLineNo">081</span>      writer.close();<a name="line.81"></a>
-<span class="sourceLineNo">082</span>    }<a name="line.82"></a>
-<span class="sourceLineNo">083</span>    return paths;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>  }<a name="line.84"></a>
-<span class="sourceLineNo">085</span><a name="line.85"></a>
-<span class="sourceLineNo">086</span>  /**<a name="line.86"></a>
-<span class="sourceLineNo">087</span>   * Close all writers without throwing any exceptions. This is used when compaction failed usually.<a name="line.87"></a>
-<span class="sourceLineNo">088</span>   */<a name="line.88"></a>
-<span class="sourceLineNo">089</span>  public List&lt;Path&gt; abortWriters() {<a name="line.89"></a>
-<span class="sourceLineNo">090</span>    List&lt;Path&gt; paths = new ArrayList&lt;&gt;();<a name="line.90"></a>
-<span class="sourceLineNo">091</span>    for (StoreFileWriter writer : writers()) {<a name="line.91"></a>
-<span class="sourceLineNo">092</span>      try {<a name="line.92"></a>
-<span class="sourceLineNo">093</span>        if (writer != null) {<a name="line.93"></a>
-<span class="sourceLineNo">094</span>          paths.add(writer.getPath());<a name="line.94"></a>
-<span class="sourceLineNo">095</span>          writer.close();<a name="line.95"></a>
-<span class="sourceLineNo">096</span>        }<a name="line.96"></a>
-<span class="sourceLineNo">097</span>      } catch (Exception ex) {<a name="line.97"></a>
-<span class="sourceLineNo">098</span>        LOG.error("Failed to close the writer after an unfinished compaction.", ex);<a name="line.98"></a>
-<span class="sourceLineNo">099</span>      }<a name="line.99"></a>
-<span class="sourceLineNo">100</span>    }<a name="line.100"></a>
-<span class="sourceLineNo">101</span>    return paths;<a name="line.101"></a>
-<span class="sourceLineNo">102</span>  }<a name="line.102"></a>
+<span class="sourceLineNo">037</span>  /** Factory that is used to produce single StoreFile.Writer-s */<a name="line.37"></a>
+<span class="sourceLineNo">038</span>  protected WriterFactory writerFactory;<a name="line.38"></a>
+<span class="sourceLineNo">039</span><a name="line.39"></a>
+<span class="sourceLineNo">040</span>  /** Source scanner that is tracking KV count; may be null if source is not StoreScanner */<a name="line.40"></a>
+<span class="sourceLineNo">041</span>  protected StoreScanner sourceScanner;<a name="line.41"></a>
+<span class="sourceLineNo">042</span><a name="line.42"></a>
+<span class="sourceLineNo">043</span>  public interface WriterFactory {<a name="line.43"></a>
+<span class="sourceLineNo">044</span>    public StoreFileWriter createWriter() throws IOException;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>  }<a name="line.45"></a>
+<span class="sourceLineNo">046</span><a name="line.46"></a>
+<span class="sourceLineNo">047</span>  /**<a name="line.47"></a>
+<span class="sourceLineNo">048</span>   * Initializes multi-writer before usage.<a name="line.48"></a>
+<span class="sourceLineNo">049</span>   * @param sourceScanner Optional store scanner to obtain the information about read progress.<a name="line.49"></a>
+<span class="sourceLineNo">050</span>   * @param factory Factory used to produce individual file writers.<a name="line.50"></a>
+<span class="sourceLineNo">051</span>   */<a name="line.51"></a>
+<span class="sourceLineNo">052</span>  public void init(StoreScanner sourceScanner, WriterFactory factory) {<a name="line.52"></a>
+<span class="sourceLineNo">053</span>    this.writerFactory = factory;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>    this.sourceScanner = sourceScanner;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>  }<a name="line.55"></a>
+<span class="sourceLineNo">056</span><a name="line.56"></a>
+<span class="sourceLineNo">057</span>  /**<a name="line.57"></a>
+<span class="sourceLineNo">058</span>   * Commit all writers.<a name="line.58"></a>
+<span class="sourceLineNo">059</span>   * &lt;p&gt;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>   * Notice that here we use the same &lt;code&gt;maxSeqId&lt;/code&gt; for all output files since we haven't<a name="line.60"></a>
+<span class="sourceLineNo">061</span>   * find an easy to find enough sequence ids for different output files in some corner cases. See<a name="line.61"></a>
+<span class="sourceLineNo">062</span>   * comments in HBASE-15400 for more details.<a name="line.62"></a>
+<span class="sourceLineNo">063</span>   */<a name="line.63"></a>
+<span class="sourceLineNo">064</span>  public List&lt;Path&gt; commitWriters(long maxSeqId, boolean majorCompaction) throws IOException {<a name="line.64"></a>
+<span class="sourceLineNo">065</span>    preCommitWriters();<a name="line.65"></a>
+<span class="sourceLineNo">066</span>    Collection&lt;StoreFileWriter&gt; writers = this.writers();<a name="line.66"></a>
+<span class="sourceLineNo">067</span>    if (LOG.isDebugEnabled()) {<a name="line.67"></a>
+<span class="sourceLineNo">068</span>      LOG.debug("Commit " + writers.size() + " writers, maxSeqId=" + maxSeqId<a name="line.68"></a>
+<span class="sourceLineNo">069</span>          + ", majorCompaction=" + majorCompaction);<a name="line.69"></a>
+<span class="sourceLineNo">070</span>    }<a name="line.70"></a>
+<span class="sourceLineNo">071</span>    List&lt;Path&gt; paths = new ArrayList&lt;&gt;();<a name="line.71"></a>
+<span class="sourceLineNo">072</span>    for (StoreFileWriter writer : writers) {<a name="line.72"></a>
+<span class="sourceLineNo">073</span>      if (writer == null) {<a name="line.73"></a>
+<span class="sourceLineNo">074</span>        continue;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>      }<a name="line.75"></a>
+<span class="sourceLineNo">076</span>      writer.appendMetadata(maxSeqId, majorCompaction);<a name="line.76"></a>
+<span class="sourceLineNo">077</span>      preCloseWriter(writer);<a name="line.77"></a>
+<span class="sourceLineNo">078</span>      paths.add(writer.getPath());<a name="line.78"></a>
+<span class="sourceLineNo">079</span>      writer.close();<a name="line.79"></a>
+<span class="sourceLineNo">080</span>    }<a name="line.80"></a>
+<span class="sourceLineNo">081</span>    return paths;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>  }<a name="line.82"></a>
+<span class="sourceLineNo">083</span><a name="line.83"></a>
+<span class="sourceLineNo">084</span>  /**<a name="line.84"></a>
+<span class="sourceLineNo">085</span>   * Close all writers without throwing any exceptions. This is used when compaction failed usually.<a name="line.85"></a>
+<span class="sourceLineNo">086</span>   */<a name="line.86"></a>
+<span class="sourceLineNo">087</span>  public List&lt;Path&gt; abortWriters() {<a name="line.87"></a>
+<span class="sourceLineNo">088</span>    List&lt;Path&gt; paths = new ArrayList&lt;&gt;();<a name="line.88"></a>
+<span class="sourceLineNo">089</span>    for (StoreFileWriter writer : writers()) {<a name="line.89"></a>
+<span class="sourceLineNo">090</span>      try {<a name="line.90"></a>
+<span class="sourceLineNo">091</span>        if (writer != null) {<a name="line.91"></a>
+<span class="sourceLineNo">092</span>          paths.add(writer.getPath());<a name="line.92"></a>
+<span class="sourceLineNo">093</span>          writer.close();<a name="line.93"></a>
+<span class="sourceLineNo">094</span>        }<a name="line.94"></a>
+<span class="sourceLineNo">095</span>      } catch (Exception ex) {<a name="line.95"></a>
+<span class="sourceLineNo">096</span>        LOG.error("Failed to close the writer after an unfinished compaction.", ex);<a name="line.96"></a>
+<span class="sourceLineNo">097</span>      }<a name="line.97"></a>
+<span class="sourceLineNo">098</span>    }<a name="line.98"></a>
+<span class="sourceLineNo">099</span>    return paths;<a name="line.99"></a>
+<span class="sourceLineNo">100</span>  }<a name="line.100"></a>
+<span class="sourceLineNo">101</span><a name="line.101"></a>
+<span class="sourceLineNo">102</span>  protected abstract Collection&lt;StoreFileWriter&gt; writers();<a name="line.102"></a>
 <span class="sourceLineNo">103</span><a name="line.103"></a>
-<span class="sourceLineNo">104</span>  protected abstract Collection&lt;StoreFileWriter&gt; writers();<a name="line.104"></a>
-<span class="sourceLineNo">105</span><a name="line.105"></a>
-<span class="sourceLineNo">106</span>  /**<a name="line.106"></a>
-<span class="sourceLineNo">107</span>   * Subclasses override this method to be called at the end of a successful sequence of append; all<a name="line.107"></a>
-<span class="sourceLineNo">108</span>   * appends are processed before this method is called.<a name="line.108"></a>
-<span class="sourceLineNo">109</span>   */<a name="line.109"></a>
-<span class="sourceLineNo">110</span>  protected void preCommitWriters() throws IOException {<a name="line.110"></a>
-<span class="sourceLineNo">111</span>  }<a name="line.111"></a>
-<span class="sourceLineNo">112</span><a name="line.112"></a>
-<span class="sourceLineNo">113</span>  /**<a name="line.113"></a>
-<span class="sourceLineNo">114</span>   * Subclasses override this method to be called before we close the give writer. Usually you can<a name="line.114"></a>
-<span class="sourceLineNo">115</span>   * append extra metadata to the writer.<a name="line.115"></a>
-<span class="sourceLineNo">116</span>   */<a name="line.116"></a>
-<span class="sourceLineNo">117</span>  protected void preCloseWriter(StoreFileWriter writer) throws IOException {<a name="line.117"></a>
-<span class="sourceLineNo">118</span>  }<a name="line.118"></a>
-<span class="sourceLineNo">119</span><a name="line.119"></a>
-<span class="sourceLineNo">120</span>  @Override<a name="line.120"></a>
-<span class="sourceLineNo">121</span>  public void beforeShipped() throws IOException {<a name="line.121"></a>
-<span class="sourceLineNo">122</span>    if (this.writers() != null) {<a name="line.122"></a>
-<span class="sourceLineNo">123</span>      for (StoreFileWriter writer : writers()) {<a name="line.123"></a>
-<span class="sourceLineNo">124</span>        writer.beforeShipped();<a name="line.124"></a>
-<span class="sourceLineNo">125</span>      }<a name="line.125"></a>
-<span class="sourceLineNo">126</span>    }<a name="line.126"></a>
-<span class="sourceLineNo">127</span>  }<a name="line.127"></a>
-<span class="sourceLineNo">128</span>}<a name="line.128"></a>
+<span class="sourceLineNo">104</span>  /**<a name="line.104"></a>
+<span class="sourceLineNo">105</span>   * Subclasses override this method to be called at the end of a successful sequence of append; all<a name="line.105"></a>
+<span class="sourceLineNo">106</span>   * appends are processed before this method is called.<a name="line.106"></a>
+<span class="sourceLineNo">107</span>   */<a name="line.107"></a>
+<span class="sourceLineNo">108</span>  protected void preCommitWriters() throws IOException {<a name="line.108"></a>
+<span class="sourceLineNo">109</span>  }<a name="line.109"></a>
+<span class="sourceLineNo">110</span><a name="line.110"></a>
+<span class="sourceLineNo">111</span>  /**<a name="line.111"></a>
+<span class="sourceLineNo">112</span>   * Subclasses override this method to be called before we close the give writer. Usually you can<a name="line.112"></a>
+<span class="sourceLineNo">113</span>   * append extra metadata to the writer.<a name="line.113"></a>
+<span class="sourceLineNo">114</span>   */<a name="line.114"></a>
+<span class="sourceLineNo">115</span>  protected void preCloseWriter(StoreFileWriter writer) throws IOException {<a name="line.115"></a>
+<span class="sourceLineNo">116</span>  }<a name="line.116"></a>
+<span class="sourceLineNo">117</span><a name="line.117"></a>
+<span class="sourceLineNo">118</span>  @Override<a name="line.118"></a>
+<span class="sourceLineNo">119</span>  public void beforeShipped() throws IOException {<a name="line.119"></a>
+<span class="sourceLineNo">120</span>    Collection&lt;StoreFileWriter&gt; writers = writers();<a name="line.120"></a>
+<span class="sourceLineNo">121</span>    if (writers != null) {<a name="line.121"></a>
+<span class="sourceLineNo">122</span>      for (StoreFileWriter writer : writers) {<a name="line.122"></a>
+<span class="sourceLineNo">123</span>        if (writer != null) {<a name="line.123"></a>
+<span class="sourceLineNo">124</span>          writer.beforeShipped();<a name="line.124"></a>
+<span class="sourceLineNo">125</span>        }<a name="line.125"></a>
+<span class="sourceLineNo">126</span>      }<a name="line.126"></a>
+<span class="sourceLineNo">127</span>    }<a name="line.127"></a>
+<span class="sourceLineNo">128</span>  }<a name="line.128"></a>
+<span class="sourceLineNo">129</span>}<a name="line.129"></a>
 
 
 

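AbstractMultiFileWriter above defines the contract the rest of this diff relies on: a WriterFactory produces individual writers on demand, commitWriters() closes every non-null writer and collects the produced paths, and abortWriters() does a best-effort close that never throws. A simplified, self-contained sketch of that commit/abort contract follows; MultiFileWriterSketch and FileWriterSketch are placeholders, and the real StoreFileWriter/StoreScanner wiring, metadata appends and the cell append path are omitted.

import java.io.IOException;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

public abstract class MultiFileWriterSketch {

  /** Mirrors AbstractMultiFileWriter.WriterFactory: produces one output writer at a time. */
  public interface WriterFactory {
    FileWriterSketch createWriter() throws IOException;
  }

  /** Placeholder for StoreFileWriter: just a path plus a close(). */
  public static class FileWriterSketch {
    private final Path path;
    public FileWriterSketch(Path path) { this.path = path; }
    public Path getPath() { return path; }
    public void close() throws IOException { /* flush and close the underlying file */ }
  }

  /** Subclasses call writerFactory.createWriter() whenever they need a new output file. */
  protected WriterFactory writerFactory;

  public void init(WriterFactory factory) {
    this.writerFactory = factory;
  }

  /** Subclasses expose whatever writers they created; entries may be null. */
  protected abstract Collection<FileWriterSketch> writers();

  /** Close every non-null writer and collect the produced paths, like commitWriters(). */
  public List<Path> commitWriters() throws IOException {
    List<Path> paths = new ArrayList<>();
    for (FileWriterSketch writer : writers()) {
      if (writer == null) {
        continue;                    // this output file was never needed
      }
      paths.add(writer.getPath());
      writer.close();
    }
    return paths;
  }

  /** Best-effort close after a failed compaction: never throws, like abortWriters(). */
  public List<Path> abortWriters() {
    List<Path> paths = new ArrayList<>();
    for (FileWriterSketch writer : writers()) {
      try {
        if (writer != null) {
          paths.add(writer.getPath());
          writer.close();
        }
      } catch (Exception ex) {
        // swallow: the compaction already failed, just try to release resources
      }
    }
    return paths;
  }
}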
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.html
index 5feaaf2..8b02862 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.html
@@ -29,111 +29,112 @@
 <span class="sourceLineNo">021</span>import java.util.ArrayList;<a name="line.21"></a>
 <span class="sourceLineNo">022</span>import java.util.Collection;<a name="line.22"></a>
 <span class="sourceLineNo">023</span>import java.util.List;<a name="line.23"></a>
-<span class="sourceLineNo">024</span><a name="line.24"></a>
-<span class="sourceLineNo">025</span>import org.apache.hadoop.fs.Path;<a name="line.25"></a>
-<span class="sourceLineNo">026</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import org.slf4j.Logger;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import org.slf4j.LoggerFactory;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.regionserver.CellSink;<a name="line.29"></a>
-<span class="sourceLineNo">030</span><a name="line.30"></a>
-<span class="sourceLineNo">031</span>/**<a name="line.31"></a>
-<span class="sourceLineNo">032</span> * Base class for cell sink that separates the provided cells into multiple files.<a name="line.32"></a>
-<span class="sourceLineNo">033</span> */<a name="line.33"></a>
-<span class="sourceLineNo">034</span>@InterfaceAudience.Private<a name="line.34"></a>
-<span class="sourceLineNo">035</span>public abstract class AbstractMultiFileWriter implements CellSink, ShipperListener {<a name="line.35"></a>
+<span class="sourceLineNo">024</span>import org.apache.hadoop.fs.Path;<a name="line.24"></a>
+<span class="sourceLineNo">025</span>import org.apache.yetus.audience.InterfaceAudience;<a name="line.25"></a>
+<span class="sourceLineNo">026</span>import org.slf4j.Logger;<a name="line.26"></a>
+<span class="sourceLineNo">027</span>import org.slf4j.LoggerFactory;<a name="line.27"></a>
+<span class="sourceLineNo">028</span><a name="line.28"></a>
+<span class="sourceLineNo">029</span>/**<a name="line.29"></a>
+<span class="sourceLineNo">030</span> * Base class for cell sink that separates the provided cells into multiple files.<a name="line.30"></a>
+<span class="sourceLineNo">031</span> */<a name="line.31"></a>
+<span class="sourceLineNo">032</span>@InterfaceAudience.Private<a name="line.32"></a>
+<span class="sourceLineNo">033</span>public abstract class AbstractMultiFileWriter implements CellSink, ShipperListener {<a name="line.33"></a>
+<span class="sourceLineNo">034</span><a name="line.34"></a>
+<span class="sourceLineNo">035</span>  private static final Logger LOG = LoggerFactory.getLogger(AbstractMultiFileWriter.class);<a name="line.35"></a>
 <span class="sourceLineNo">036</span><a name="line.36"></a>
-<span class="sourceLineNo">037</span>  private static final Logger LOG = LoggerFactory.getLogger(AbstractMultiFileWriter.class);<a name="line.37"></a>
-<span class="sourceLineNo">038</span><a name="line.38"></a>
-<span class="sourceLineNo">039</span>  /** Factory that is used to produce single StoreFile.Writer-s */<a name="line.39"></a>
-<span class="sourceLineNo">040</span>  protected WriterFactory writerFactory;<a name="line.40"></a>
-<span class="sourceLineNo">041</span><a name="line.41"></a>
-<span class="sourceLineNo">042</span>  /** Source scanner that is tracking KV count; may be null if source is not StoreScanner */<a name="line.42"></a>
-<span class="sourceLineNo">043</span>  protected StoreScanner sourceScanner;<a name="line.43"></a>
-<span class="sourceLineNo">044</span><a name="line.44"></a>
-<span class="sourceLineNo">045</span>  public interface WriterFactory {<a name="line.45"></a>
-<span class="sourceLineNo">046</span>    public StoreFileWriter createWriter() throws IOException;<a name="line.46"></a>
-<span class="sourceLineNo">047</span>  }<a name="line.47"></a>
-<span class="sourceLineNo">048</span><a name="line.48"></a>
-<span class="sourceLineNo">049</span>  /**<a name="line.49"></a>
-<span class="sourceLineNo">050</span>   * Initializes multi-writer before usage.<a name="line.50"></a>
-<span class="sourceLineNo">051</span>   * @param sourceScanner Optional store scanner to obtain the information about read progress.<a name="line.51"></a>
-<span class="sourceLineNo">052</span>   * @param factory Factory used to produce individual file writers.<a name="line.52"></a>
-<span class="sourceLineNo">053</span>   */<a name="line.53"></a>
-<span class="sourceLineNo">054</span>  public void init(StoreScanner sourceScanner, WriterFactory factory) {<a name="line.54"></a>
-<span class="sourceLineNo">055</span>    this.writerFactory = factory;<a name="line.55"></a>
-<span class="sourceLineNo">056</span>    this.sourceScanner = sourceScanner;<a name="line.56"></a>
-<span class="sourceLineNo">057</span>  }<a name="line.57"></a>
-<span class="sourceLineNo">058</span><a name="line.58"></a>
-<span class="sourceLineNo">059</span>  /**<a name="line.59"></a>
-<span class="sourceLineNo">060</span>   * Commit all writers.<a name="line.60"></a>
-<span class="sourceLineNo">061</span>   * &lt;p&gt;<a name="line.61"></a>
-<span class="sourceLineNo">062</span>   * Notice that here we use the same &lt;code&gt;maxSeqId&lt;/code&gt; for all output files since we haven't<a name="line.62"></a>
-<span class="sourceLineNo">063</span>   * find an easy to find enough sequence ids for different output files in some corner cases. See<a name="line.63"></a>
-<span class="sourceLineNo">064</span>   * comments in HBASE-15400 for more details.<a name="line.64"></a>
-<span class="sourceLineNo">065</span>   */<a name="line.65"></a>
-<span class="sourceLineNo">066</span>  public List&lt;Path&gt; commitWriters(long maxSeqId, boolean majorCompaction) throws IOException {<a name="line.66"></a>
-<span class="sourceLineNo">067</span>    preCommitWriters();<a name="line.67"></a>
-<span class="sourceLineNo">068</span>    Collection&lt;StoreFileWriter&gt; writers = this.writers();<a name="line.68"></a>
-<span class="sourceLineNo">069</span>    if (LOG.isDebugEnabled()) {<a name="line.69"></a>
-<span class="sourceLineNo">070</span>      LOG.debug("Commit " + writers.size() + " writers, maxSeqId=" + maxSeqId<a name="line.70"></a>
-<span class="sourceLineNo">071</span>          + ", majorCompaction=" + majorCompaction);<a name="line.71"></a>
-<span class="sourceLineNo">072</span>    }<a name="line.72"></a>
-<span class="sourceLineNo">073</span>    List&lt;Path&gt; paths = new ArrayList&lt;&gt;();<a name="line.73"></a>
-<span class="sourceLineNo">074</span>    for (StoreFileWriter writer : writers) {<a name="line.74"></a>
-<span class="sourceLineNo">075</span>      if (writer == null) {<a name="line.75"></a>
-<span class="sourceLineNo">076</span>        continue;<a name="line.76"></a>
-<span class="sourceLineNo">077</span>      }<a name="line.77"></a>
-<span class="sourceLineNo">078</span>      writer.appendMetadata(maxSeqId, majorCompaction);<a name="line.78"></a>
-<span class="sourceLineNo">079</span>      preCloseWriter(writer);<a name="line.79"></a>
-<span class="sourceLineNo">080</span>      paths.add(writer.getPath());<a name="line.80"></a>
-<span class="sourceLineNo">081</span>      writer.close();<a name="line.81"></a>
-<span class="sourceLineNo">082</span>    }<a name="line.82"></a>
-<span class="sourceLineNo">083</span>    return paths;<a name="line.83"></a>
-<span class="sourceLineNo">084</span>  }<a name="line.84"></a>
-<span class="sourceLineNo">085</span><a name="line.85"></a>
-<span class="sourceLineNo">086</span>  /**<a name="line.86"></a>
-<span class="sourceLineNo">087</span>   * Close all writers without throwing any exceptions. This is used when compaction failed usually.<a name="line.87"></a>
-<span class="sourceLineNo">088</span>   */<a name="line.88"></a>
-<span class="sourceLineNo">089</span>  public List&lt;Path&gt; abortWriters() {<a name="line.89"></a>
-<span class="sourceLineNo">090</span>    List&lt;Path&gt; paths = new ArrayList&lt;&gt;();<a name="line.90"></a>
-<span class="sourceLineNo">091</span>    for (StoreFileWriter writer : writers()) {<a name="line.91"></a>
-<span class="sourceLineNo">092</span>      try {<a name="line.92"></a>
-<span class="sourceLineNo">093</span>        if (writer != null) {<a name="line.93"></a>
-<span class="sourceLineNo">094</span>          paths.add(writer.getPath());<a name="line.94"></a>
-<span class="sourceLineNo">095</span>          writer.close();<a name="line.95"></a>
-<span class="sourceLineNo">096</span>        }<a name="line.96"></a>
-<span class="sourceLineNo">097</span>      } catch (Exception ex) {<a name="line.97"></a>
-<span class="sourceLineNo">098</span>        LOG.error("Failed to close the writer after an unfinished compaction.", ex);<a name="line.98"></a>
-<span class="sourceLineNo">099</span>      }<a name="line.99"></a>
-<span class="sourceLineNo">100</span>    }<a name="line.100"></a>
-<span class="sourceLineNo">101</span>    return paths;<a name="line.101"></a>
-<span class="sourceLineNo">102</span>  }<a name="line.102"></a>
+<span class="sourceLineNo">037</span>  /** Factory that is used to produce single StoreFile.Writer-s */<a name="line.37"></a>
+<span class="sourceLineNo">038</span>  protected WriterFactory writerFactory;<a name="line.38"></a>
+<span class="sourceLineNo">039</span><a name="line.39"></a>
+<span class="sourceLineNo">040</span>  /** Source scanner that is tracking KV count; may be null if source is not StoreScanner */<a name="line.40"></a>
+<span class="sourceLineNo">041</span>  protected StoreScanner sourceScanner;<a name="line.41"></a>
+<span class="sourceLineNo">042</span><a name="line.42"></a>
+<span class="sourceLineNo">043</span>  public interface WriterFactory {<a name="line.43"></a>
+<span class="sourceLineNo">044</span>    public StoreFileWriter createWriter() throws IOException;<a name="line.44"></a>
+<span class="sourceLineNo">045</span>  }<a name="line.45"></a>
+<span class="sourceLineNo">046</span><a name="line.46"></a>
+<span class="sourceLineNo">047</span>  /**<a name="line.47"></a>
+<span class="sourceLineNo">048</span>   * Initializes multi-writer before usage.<a name="line.48"></a>
+<span class="sourceLineNo">049</span>   * @param sourceScanner Optional store scanner to obtain the information about read progress.<a name="line.49"></a>
+<span class="sourceLineNo">050</span>   * @param factory Factory used to produce individual file writers.<a name="line.50"></a>
+<span class="sourceLineNo">051</span>   */<a name="line.51"></a>
+<span class="sourceLineNo">052</span>  public void init(StoreScanner sourceScanner, WriterFactory factory) {<a name="line.52"></a>
+<span class="sourceLineNo">053</span>    this.writerFactory = factory;<a name="line.53"></a>
+<span class="sourceLineNo">054</span>    this.sourceScanner = sourceScanner;<a name="line.54"></a>
+<span class="sourceLineNo">055</span>  }<a name="line.55"></a>
+<span class="sourceLineNo">056</span><a name="line.56"></a>
+<span class="sourceLineNo">057</span>  /**<a name="line.57"></a>
+<span class="sourceLineNo">058</span>   * Commit all writers.<a name="line.58"></a>
+<span class="sourceLineNo">059</span>   * &lt;p&gt;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>   * Notice that here we use the same &lt;code&gt;maxSeqId&lt;/code&gt; for all output files since we haven't<a name="line.60"></a>
+<span class="sourceLineNo">061</span>   * find an easy to find enough sequence ids for different output files in some corner cases. See<a name="line.61"></a>
+<span class="sourceLineNo">062</span>   * comments in HBASE-15400 for more details.<a name="line.62"></a>
+<span class="sourceLineNo">063</span>   */<a name="line.63"></a>
+<span class="sourceLineNo">064</span>  public List&lt;Path&gt; commitWriters(long maxSeqId, boolean majorCompaction) throws IOException {<a name="line.64"></a>
+<span class="sourceLineNo">065</span>    preCommitWriters();<a name="line.65"></a>
+<span class="sourceLineNo">066</span>    Collection&lt;StoreFileWriter&gt; writers = this.writers();<a name="line.66"></a>
+<span class="sourceLineNo">067</span>    if (LOG.isDebugEnabled()) {<a name="line.67"></a>
+<span class="sourceLineNo">068</span>      LOG.debug("Commit " + writers.size() + " writers, maxSeqId=" + maxSeqId<a name="line.68"></a>
+<span class="sourceLineNo">069</span>          + ", majorCompaction=" + majorCompaction);<a name="line.69"></a>
+<span class="sourceLineNo">070</span>    }<a name="line.70"></a>
+<span class="sourceLineNo">071</span>    List&lt;Path&gt; paths = new ArrayList&lt;&gt;();<a name="line.71"></a>
+<span class="sourceLineNo">072</span>    for (StoreFileWriter writer : writers) {<a name="line.72"></a>
+<span class="sourceLineNo">073</span>      if (writer == null) {<a name="line.73"></a>
+<span class="sourceLineNo">074</span>        continue;<a name="line.74"></a>
+<span class="sourceLineNo">075</span>      }<a name="line.75"></a>
+<span class="sourceLineNo">076</span>      writer.appendMetadata(maxSeqId, majorCompaction);<a name="line.76"></a>
+<span class="sourceLineNo">077</span>      preCloseWriter(writer);<a name="line.77"></a>
+<span class="sourceLineNo">078</span>      paths.add(writer.getPath());<a name="line.78"></a>
+<span class="sourceLineNo">079</span>      writer.close();<a name="line.79"></a>
+<span class="sourceLineNo">080</span>    }<a name="line.80"></a>
+<span class="sourceLineNo">081</span>    return paths;<a name="line.81"></a>
+<span class="sourceLineNo">082</span>  }<a name="line.82"></a>
+<span class="sourceLineNo">083</span><a name="line.83"></a>
+<span class="sourceLineNo">084</span>  /**<a name="line.84"></a>
+<span class="sourceLineNo">085</span>   * Close all writers without throwing any exceptions. This is used when compaction failed usually.<a name="line.85"></a>
+<span class="sourceLineNo">086</span>   */<a name="line.86"></a>
+<span class="sourceLineNo">087</span>  public List&lt;Path&gt; abortWriters() {<a name="line.87"></a>
+<span class="sourceLineNo">088</span>    List&lt;Path&gt; paths = new ArrayList&lt;&gt;();<a name="line.88"></a>
+<span class="sourceLineNo">089</span>    for (StoreFileWriter writer : writers()) {<a name="line.89"></a>
+<span class="sourceLineNo">090</span>      try {<a name="line.90"></a>
+<span class="sourceLineNo">091</span>        if (writer != null) {<a name="line.91"></a>
+<span class="sourceLineNo">092</span>          paths.add(writer.getPath());<a name="line.92"></a>
+<span class="sourceLineNo">093</span>          writer.close();<a name="line.93"></a>
+<span class="sourceLineNo">094</span>        }<a name="line.94"></a>
+<span class="sourceLineNo">095</span>      } catch (Exception ex) {<a name="line.95"></a>
+<span class="sourceLineNo">096</span>        LOG.error("Failed to close the writer after an unfinished compaction.", ex);<a name="line.96"></a>
+<span class="sourceLineNo">097</span>      }<a name="line.97"></a>
+<span class="sourceLineNo">098</span>    }<a name="line.98"></a>
+<span class="sourceLineNo">099</span>    return paths;<a name="line.99"></a>
+<span class="sourceLineNo">100</span>  }<a name="line.100"></a>
+<span class="sourceLineNo">101</span><a name="line.101"></a>
+<span class="sourceLineNo">102</span>  protected abstract Collection&lt;StoreFileWriter&gt; writers();<a name="line.102"></a>
 <span class="sourceLineNo">103</span><a name="line.103"></a>
-<span class="sourceLineNo">104</span>  protected abstract Collection&lt;StoreFileWriter&gt; writers();<a name="line.104"></a>
-<span class="sourceLineNo">105</span><a name="line.105"></a>
-<span class="sourceLineNo">106</span>  /**<a name="line.106"></a>
-<span class="sourceLineNo">107</span>   * Subclasses override this method to be called at the end of a successful sequence of append; all<a name="line.107"></a>
-<span class="sourceLineNo">108</span>   * appends are processed before this method is called.<a name="line.108"></a>
-<span class="sourceLineNo">109</span>   */<a name="line.109"></a>
-<span class="sourceLineNo">110</span>  protected void preCommitWriters() throws IOException {<a name="line.110"></a>
-<span class="sourceLineNo">111</span>  }<a name="line.111"></a>
-<span class="sourceLineNo">112</span><a name="line.112"></a>
-<span class="sourceLineNo">113</span>  /**<a name="line.113"></a>
-<span class="sourceLineNo">114</span>   * Subclasses override this method to be called before we close the give writer. Usually you can<a name="line.114"></a>
-<span class="sourceLineNo">115</span>   * append extra metadata to the writer.<a name="line.115"></a>
-<span class="sourceLineNo">116</span>   */<a name="line.116"></a>
-<span class="sourceLineNo">117</span>  protected void preCloseWriter(StoreFileWriter writer) throws IOException {<a name="line.117"></a>
-<span class="sourceLineNo">118</span>  }<a name="line.118"></a>
-<span class="sourceLineNo">119</span><a name="line.119"></a>
-<span class="sourceLineNo">120</span>  @Override<a name="line.120"></a>
-<span class="sourceLineNo">121</span>  public void beforeShipped() throws IOException {<a name="line.121"></a>
-<span class="sourceLineNo">122</span>    if (this.writers() != null) {<a name="line.122"></a>
-<span class="sourceLineNo">123</span>      for (StoreFileWriter writer : writers()) {<a name="line.123"></a>
-<span class="sourceLineNo">124</span>        writer.beforeShipped();<a name="line.124"></a>
-<span class="sourceLineNo">125</span>      }<a name="line.125"></a>
-<span class="sourceLineNo">126</span>    }<a name="line.126"></a>
-<span class="sourceLineNo">127</span>  }<a name="line.127"></a>
-<span class="sourceLineNo">128</span>}<a name="line.128"></a>
+<span class="sourceLineNo">104</span>  /**<a name="line.104"></a>
+<span class="sourceLineNo">105</span>   * Subclasses override this method to be called at the end of a successful sequence of append; all<a name="line.105"></a>
+<span class="sourceLineNo">106</span>   * appends are processed before this method is called.<a name="line.106"></a>
+<span class="sourceLineNo">107</span>   */<a name="line.107"></a>
+<span class="sourceLineNo">108</span>  protected void preCommitWriters() throws IOException {<a name="line.108"></a>
+<span class="sourceLineNo">109</span>  }<a name="line.109"></a>
+<span class="sourceLineNo">110</span><a name="line.110"></a>
+<span class="sourceLineNo">111</span>  /**<a name="line.111"></a>
+<span class="sourceLineNo">112</span>   * Subclasses override this method to be called before we close the give writer. Usually you can<a name="line.112"></a>
+<span class="sourceLineNo">113</span>   * append extra metadata to the writer.<a name="line.113"></a>
+<span class="sourceLineNo">114</span>   */<a name="line.114"></a>
+<span class="sourceLineNo">115</span>  protected void preCloseWriter(StoreFileWriter writer) throws IOException {<a name="line.115"></a>
+<span class="sourceLineNo">116</span>  }<a name="line.116"></a>
+<span class="sourceLineNo">117</span><a name="line.117"></a>
+<span class="sourceLineNo">118</span>  @Override<a name="line.118"></a>
+<span class="sourceLineNo">119</span>  public void beforeShipped() throws IOException {<a name="line.119"></a>
+<span class="sourceLineNo">120</span>    Collection&lt;StoreFileWriter&gt; writers = writers();<a name="line.120"></a>
+<span class="sourceLineNo">121</span>    if (writers != null) {<a name="line.121"></a>
+<span class="sourceLineNo">122</span>      for (StoreFileWriter writer : writers) {<a name="line.122"></a>
+<span class="sourceLineNo">123</span>        if (writer != null) {<a name="line.123"></a>
+<span class="sourceLineNo">124</span>          writer.beforeShipped();<a name="line.124"></a>
+<span class="sourceLineNo">125</span>        }<a name="line.125"></a>
+<span class="sourceLineNo">126</span>      }<a name="line.126"></a>
+<span class="sourceLineNo">127</span>    }<a name="line.127"></a>
+<span class="sourceLineNo">128</span>  }<a name="line.128"></a>
+<span class="sourceLineNo">129</span>}<a name="line.129"></a>
 
 
 

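The only behavioral change in the two AbstractMultiFileWriter pages above is in beforeShipped(): the new version fetches writers() once, guards against a null collection, and skips null entries instead of invoking beforeShipped() on them. A small sketch of that null-safe iteration follows; WriterSketch stands in for StoreFileWriter.

import java.util.Arrays;
import java.util.Collection;

public class NullSafeBeforeShippedSketch {

  /** Stand-in for StoreFileWriter's beforeShipped() hook. */
  interface WriterSketch {
    void beforeShipped();
  }

  static void beforeShipped(Collection<WriterSketch> writers) {
    if (writers != null) {                  // a subclass may not have created any writers yet
      for (WriterSketch writer : writers) {
        if (writer != null) {               // some multi-writers keep null slots for unused outputs
          writer.beforeShipped();
        }
      }
    }
  }

  public static void main(String[] args) {
    WriterSketch w = () -> System.out.println("flushing cells that are about to be shipped");
    beforeShipped(Arrays.asList(w, null, w));   // the null entry is skipped, no NPE
    beforeShipped(null);                        // no-op
  }
}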

[09/26] hbase-site git commit: Published site at 6f15cecaed2f1f76bfe1880b7c578ed369daa5d5.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.DataCacheEntry.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.DataCacheEntry.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.DataCacheEntry.html
index f8e34f2..ea22449 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.DataCacheEntry.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.DataCacheEntry.html
@@ -217,184 +217,188 @@
 <span class="sourceLineNo">209</span>  @Test<a name="line.209"></a>
 <span class="sourceLineNo">210</span>  public void testDisableCacheDataBlock() throws IOException {<a name="line.210"></a>
 <span class="sourceLineNo">211</span>    Configuration conf = HBaseConfiguration.create();<a name="line.211"></a>
-<span class="sourceLineNo">212</span>    CacheConfig cacheConfig = new CacheConfig(conf);<a name="line.212"></a>
-<span class="sourceLineNo">213</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.213"></a>
-<span class="sourceLineNo">214</span>    assertFalse(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.214"></a>
-<span class="sourceLineNo">215</span>    assertFalse(cacheConfig.shouldCacheDataCompressed());<a name="line.215"></a>
-<span class="sourceLineNo">216</span>    assertFalse(cacheConfig.shouldCacheDataOnWrite());<a name="line.216"></a>
-<span class="sourceLineNo">217</span>    assertTrue(cacheConfig.shouldCacheDataOnRead());<a name="line.217"></a>
-<span class="sourceLineNo">218</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.218"></a>
-<span class="sourceLineNo">219</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.219"></a>
-<span class="sourceLineNo">220</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.220"></a>
-<span class="sourceLineNo">221</span>    assertFalse(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.221"></a>
-<span class="sourceLineNo">222</span>    assertFalse(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.222"></a>
-<span class="sourceLineNo">223</span><a name="line.223"></a>
-<span class="sourceLineNo">224</span>    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, true);<a name="line.224"></a>
-<span class="sourceLineNo">225</span>    conf.setBoolean(CacheConfig.CACHE_DATA_BLOCKS_COMPRESSED_KEY, true);<a name="line.225"></a>
-<span class="sourceLineNo">226</span>    conf.setBoolean(CacheConfig.CACHE_BLOOM_BLOCKS_ON_WRITE_KEY, true);<a name="line.226"></a>
-<span class="sourceLineNo">227</span>    conf.setBoolean(CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY, true);<a name="line.227"></a>
-<span class="sourceLineNo">228</span><a name="line.228"></a>
-<span class="sourceLineNo">229</span>    cacheConfig = new CacheConfig(conf);<a name="line.229"></a>
-<span class="sourceLineNo">230</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.230"></a>
-<span class="sourceLineNo">231</span>    assertTrue(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.231"></a>
-<span class="sourceLineNo">232</span>    assertTrue(cacheConfig.shouldCacheDataCompressed());<a name="line.232"></a>
-<span class="sourceLineNo">233</span>    assertTrue(cacheConfig.shouldCacheDataOnWrite());<a name="line.233"></a>
-<span class="sourceLineNo">234</span>    assertTrue(cacheConfig.shouldCacheDataOnRead());<a name="line.234"></a>
-<span class="sourceLineNo">235</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.235"></a>
-<span class="sourceLineNo">236</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.236"></a>
-<span class="sourceLineNo">237</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.237"></a>
-<span class="sourceLineNo">238</span>    assertTrue(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.238"></a>
-<span class="sourceLineNo">239</span>    assertTrue(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.239"></a>
-<span class="sourceLineNo">240</span><a name="line.240"></a>
-<span class="sourceLineNo">241</span>    conf.setBoolean(CacheConfig.CACHE_DATA_ON_READ_KEY, false);<a name="line.241"></a>
-<span class="sourceLineNo">242</span>    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, false);<a name="line.242"></a>
-<span class="sourceLineNo">243</span><a name="line.243"></a>
-<span class="sourceLineNo">244</span>    cacheConfig = new CacheConfig(conf);<a name="line.244"></a>
-<span class="sourceLineNo">245</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.245"></a>
-<span class="sourceLineNo">246</span>    assertFalse(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.246"></a>
-<span class="sourceLineNo">247</span>    assertFalse(cacheConfig.shouldCacheDataCompressed());<a name="line.247"></a>
-<span class="sourceLineNo">248</span>    assertFalse(cacheConfig.shouldCacheDataOnWrite());<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    assertFalse(cacheConfig.shouldCacheDataOnRead());<a name="line.249"></a>
-<span class="sourceLineNo">250</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.250"></a>
-<span class="sourceLineNo">251</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.251"></a>
-<span class="sourceLineNo">252</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.252"></a>
-<span class="sourceLineNo">253</span>    assertTrue(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    assertTrue(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.254"></a>
-<span class="sourceLineNo">255</span><a name="line.255"></a>
-<span class="sourceLineNo">256</span>    conf.setBoolean(CacheConfig.CACHE_DATA_ON_READ_KEY, true);<a name="line.256"></a>
-<span class="sourceLineNo">257</span>    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, false);<a name="line.257"></a>
-<span class="sourceLineNo">258</span><a name="line.258"></a>
-<span class="sourceLineNo">259</span>    HColumnDescriptor family = new HColumnDescriptor("testDisableCacheDataBlock");<a name="line.259"></a>
-<span class="sourceLineNo">260</span>    family.setBlockCacheEnabled(false);<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>    cacheConfig = new CacheConfig(conf, family);<a name="line.262"></a>
-<span class="sourceLineNo">263</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    assertFalse(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.264"></a>
-<span class="sourceLineNo">265</span>    assertFalse(cacheConfig.shouldCacheDataCompressed());<a name="line.265"></a>
-<span class="sourceLineNo">266</span>    assertFalse(cacheConfig.shouldCacheDataOnWrite());<a name="line.266"></a>
-<span class="sourceLineNo">267</span>    assertFalse(cacheConfig.shouldCacheDataOnRead());<a name="line.267"></a>
-<span class="sourceLineNo">268</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.269"></a>
-<span class="sourceLineNo">270</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.270"></a>
-<span class="sourceLineNo">271</span>    assertTrue(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.271"></a>
-<span class="sourceLineNo">272</span>    assertTrue(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.272"></a>
-<span class="sourceLineNo">273</span>  }<a name="line.273"></a>
-<span class="sourceLineNo">274</span><a name="line.274"></a>
-<span class="sourceLineNo">275</span>  @Test<a name="line.275"></a>
-<span class="sourceLineNo">276</span>  public void testCacheConfigDefaultLRUBlockCache() {<a name="line.276"></a>
-<span class="sourceLineNo">277</span>    CacheConfig cc = new CacheConfig(this.conf);<a name="line.277"></a>
-<span class="sourceLineNo">278</span>    assertTrue(cc.isBlockCacheEnabled());<a name="line.278"></a>
-<span class="sourceLineNo">279</span>    assertTrue(CacheConfig.DEFAULT_IN_MEMORY == cc.isInMemory());<a name="line.279"></a>
-<span class="sourceLineNo">280</span>    basicBlockCacheOps(cc, false, true);<a name="line.280"></a>
-<span class="sourceLineNo">281</span>    assertTrue(cc.getBlockCache() instanceof LruBlockCache);<a name="line.281"></a>
-<span class="sourceLineNo">282</span>  }<a name="line.282"></a>
-<span class="sourceLineNo">283</span><a name="line.283"></a>
-<span class="sourceLineNo">284</span>  /**<a name="line.284"></a>
-<span class="sourceLineNo">285</span>   * Assert that the caches are deployed with CombinedBlockCache and of the appropriate sizes.<a name="line.285"></a>
-<span class="sourceLineNo">286</span>   */<a name="line.286"></a>
-<span class="sourceLineNo">287</span>  @Test<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  public void testOffHeapBucketCacheConfig() {<a name="line.288"></a>
-<span class="sourceLineNo">289</span>    this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");<a name="line.289"></a>
-<span class="sourceLineNo">290</span>    doBucketCacheConfigTest();<a name="line.290"></a>
-<span class="sourceLineNo">291</span>  }<a name="line.291"></a>
-<span class="sourceLineNo">292</span><a name="line.292"></a>
-<span class="sourceLineNo">293</span>  @Test<a name="line.293"></a>
-<span class="sourceLineNo">294</span>  public void testFileBucketCacheConfig() throws IOException {<a name="line.294"></a>
-<span class="sourceLineNo">295</span>    HBaseTestingUtility htu = new HBaseTestingUtility(this.conf);<a name="line.295"></a>
-<span class="sourceLineNo">296</span>    try {<a name="line.296"></a>
-<span class="sourceLineNo">297</span>      Path p = new Path(htu.getDataTestDir(), "bc.txt");<a name="line.297"></a>
-<span class="sourceLineNo">298</span>      FileSystem fs = FileSystem.get(this.conf);<a name="line.298"></a>
-<span class="sourceLineNo">299</span>      fs.create(p).close();<a name="line.299"></a>
-<span class="sourceLineNo">300</span>      this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "file:" + p);<a name="line.300"></a>
-<span class="sourceLineNo">301</span>      doBucketCacheConfigTest();<a name="line.301"></a>
-<span class="sourceLineNo">302</span>    } finally {<a name="line.302"></a>
-<span class="sourceLineNo">303</span>      htu.cleanupTestDir();<a name="line.303"></a>
-<span class="sourceLineNo">304</span>    }<a name="line.304"></a>
-<span class="sourceLineNo">305</span>  }<a name="line.305"></a>
-<span class="sourceLineNo">306</span><a name="line.306"></a>
-<span class="sourceLineNo">307</span>  private void doBucketCacheConfigTest() {<a name="line.307"></a>
-<span class="sourceLineNo">308</span>    final int bcSize = 100;<a name="line.308"></a>
-<span class="sourceLineNo">309</span>    this.conf.setInt(HConstants.BUCKET_CACHE_SIZE_KEY, bcSize);<a name="line.309"></a>
-<span class="sourceLineNo">310</span>    CacheConfig cc = new CacheConfig(this.conf);<a name="line.310"></a>
-<span class="sourceLineNo">311</span>    basicBlockCacheOps(cc, false, false);<a name="line.311"></a>
-<span class="sourceLineNo">312</span>    assertTrue(cc.getBlockCache() instanceof CombinedBlockCache);<a name="line.312"></a>
-<span class="sourceLineNo">313</span>    // TODO: Assert sizes allocated are right and proportions.<a name="line.313"></a>
-<span class="sourceLineNo">314</span>    CombinedBlockCache cbc = (CombinedBlockCache)cc.getBlockCache();<a name="line.314"></a>
-<span class="sourceLineNo">315</span>    BlockCache [] bcs = cbc.getBlockCaches();<a name="line.315"></a>
-<span class="sourceLineNo">316</span>    assertTrue(bcs[0] instanceof LruBlockCache);<a name="line.316"></a>
-<span class="sourceLineNo">317</span>    LruBlockCache lbc = (LruBlockCache)bcs[0];<a name="line.317"></a>
-<span class="sourceLineNo">318</span>    assertEquals(MemorySizeUtil.getOnHeapCacheSize(this.conf), lbc.getMaxSize());<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    assertTrue(bcs[1] instanceof BucketCache);<a name="line.319"></a>
-<span class="sourceLineNo">320</span>    BucketCache bc = (BucketCache)bcs[1];<a name="line.320"></a>
-<span class="sourceLineNo">321</span>    // getMaxSize comes back in bytes but we specified size in MB<a name="line.321"></a>
-<span class="sourceLineNo">322</span>    assertEquals(bcSize, bc.getMaxSize() / (1024 * 1024));<a name="line.322"></a>
-<span class="sourceLineNo">323</span>  }<a name="line.323"></a>
-<span class="sourceLineNo">324</span><a name="line.324"></a>
-<span class="sourceLineNo">325</span>  /**<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   * Assert that when BUCKET_CACHE_COMBINED_KEY is false, the non-default, that we deploy<a name="line.326"></a>
-<span class="sourceLineNo">327</span>   * LruBlockCache as L1 with a BucketCache for L2.<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   */<a name="line.328"></a>
-<span class="sourceLineNo">329</span>  @Test<a name="line.329"></a>
-<span class="sourceLineNo">330</span>  public void testBucketCacheConfigL1L2Setup() {<a name="line.330"></a>
-<span class="sourceLineNo">331</span>    this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");<a name="line.331"></a>
-<span class="sourceLineNo">332</span>    // Make lru size is smaller than bcSize for sure.  Need this to be true so when eviction<a name="line.332"></a>
-<span class="sourceLineNo">333</span>    // from L1 happens, it does not fail because L2 can't take the eviction because block too big.<a name="line.333"></a>
-<span class="sourceLineNo">334</span>    this.conf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.001f);<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    MemoryUsage mu = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage();<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    long lruExpectedSize = MemorySizeUtil.getOnHeapCacheSize(this.conf);<a name="line.336"></a>
-<span class="sourceLineNo">337</span>    final int bcSize = 100;<a name="line.337"></a>
-<span class="sourceLineNo">338</span>    long bcExpectedSize = 100 * 1024 * 1024; // MB.<a name="line.338"></a>
-<span class="sourceLineNo">339</span>    assertTrue(lruExpectedSize &lt; bcExpectedSize);<a name="line.339"></a>
-<span class="sourceLineNo">340</span>    this.conf.setInt(HConstants.BUCKET_CACHE_SIZE_KEY, bcSize);<a name="line.340"></a>
-<span class="sourceLineNo">341</span>    CacheConfig cc = new CacheConfig(this.conf);<a name="line.341"></a>
-<span class="sourceLineNo">342</span>    basicBlockCacheOps(cc, false, false);<a name="line.342"></a>
-<span class="sourceLineNo">343</span>    assertTrue(cc.getBlockCache() instanceof CombinedBlockCache);<a name="line.343"></a>
-<span class="sourceLineNo">344</span>    // TODO: Assert sizes allocated are right and proportions.<a name="line.344"></a>
-<span class="sourceLineNo">345</span>    CombinedBlockCache cbc = (CombinedBlockCache)cc.getBlockCache();<a name="line.345"></a>
-<span class="sourceLineNo">346</span>    LruBlockCache lbc = cbc.onHeapCache;<a name="line.346"></a>
-<span class="sourceLineNo">347</span>    assertEquals(lruExpectedSize, lbc.getMaxSize());<a name="line.347"></a>
-<span class="sourceLineNo">348</span>    BlockCache bc = cbc.l2Cache;<a name="line.348"></a>
-<span class="sourceLineNo">349</span>    // getMaxSize comes back in bytes but we specified size in MB<a name="line.349"></a>
-<span class="sourceLineNo">350</span>    assertEquals(bcExpectedSize, ((BucketCache) bc).getMaxSize());<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    // Test the L1+L2 deploy works as we'd expect with blocks evicted from L1 going to L2.<a name="line.351"></a>
-<span class="sourceLineNo">352</span>    long initialL1BlockCount = lbc.getBlockCount();<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    long initialL2BlockCount = bc.getBlockCount();<a name="line.353"></a>
-<span class="sourceLineNo">354</span>    Cacheable c = new DataCacheEntry();<a name="line.354"></a>
-<span class="sourceLineNo">355</span>    BlockCacheKey bck = new BlockCacheKey("bck", 0);<a name="line.355"></a>
-<span class="sourceLineNo">356</span>    lbc.cacheBlock(bck, c, false);<a name="line.356"></a>
-<span class="sourceLineNo">357</span>    assertEquals(initialL1BlockCount + 1, lbc.getBlockCount());<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    assertEquals(initialL2BlockCount, bc.getBlockCount());<a name="line.358"></a>
-<span class="sourceLineNo">359</span>    // Force evictions by putting in a block too big.<a name="line.359"></a>
-<span class="sourceLineNo">360</span>    final long justTooBigSize = lbc.acceptableSize() + 1;<a name="line.360"></a>
-<span class="sourceLineNo">361</span>    lbc.cacheBlock(new BlockCacheKey("bck2", 0), new DataCacheEntry() {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>      @Override<a name="line.362"></a>
-<span class="sourceLineNo">363</span>      public long heapSize() {<a name="line.363"></a>
-<span class="sourceLineNo">364</span>        return justTooBigSize;<a name="line.364"></a>
-<span class="sourceLineNo">365</span>      }<a name="line.365"></a>
-<span class="sourceLineNo">366</span><a name="line.366"></a>
-<span class="sourceLineNo">367</span>      @Override<a name="line.367"></a>
-<span class="sourceLineNo">368</span>      public int getSerializedLength() {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>        return (int)heapSize();<a name="line.369"></a>
-<span class="sourceLineNo">370</span>      }<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    });<a name="line.371"></a>
-<span class="sourceLineNo">372</span>    // The eviction thread in lrublockcache needs to run.<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    while (initialL1BlockCount != lbc.getBlockCount()) Threads.sleep(10);<a name="line.373"></a>
-<span class="sourceLineNo">374</span>    assertEquals(initialL1BlockCount, lbc.getBlockCount());<a name="line.374"></a>
-<span class="sourceLineNo">375</span>  }<a name="line.375"></a>
-<span class="sourceLineNo">376</span><a name="line.376"></a>
-<span class="sourceLineNo">377</span>  @Test<a name="line.377"></a>
-<span class="sourceLineNo">378</span>  public void testL2CacheWithInvalidBucketSize() {<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    Configuration c = new Configuration(this.conf);<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    c.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    c.set(CacheConfig.BUCKET_CACHE_BUCKETS_KEY, "256,512,1024,2048,4000,4096");<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    c.setFloat(HConstants.BUCKET_CACHE_SIZE_KEY, 1024);<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    try {<a name="line.383"></a>
-<span class="sourceLineNo">384</span>      CacheConfig.getBucketCache(c);<a name="line.384"></a>
-<span class="sourceLineNo">385</span>      fail("Should throw IllegalArgumentException when passing illegal value for bucket size");<a name="line.385"></a>
-<span class="sourceLineNo">386</span>    } catch (IllegalArgumentException e) {<a name="line.386"></a>
-<span class="sourceLineNo">387</span>    }<a name="line.387"></a>
-<span class="sourceLineNo">388</span>  }<a name="line.388"></a>
-<span class="sourceLineNo">389</span>}<a name="line.389"></a>
+<span class="sourceLineNo">212</span>    CacheConfig.instantiateBlockCache(conf);<a name="line.212"></a>
+<span class="sourceLineNo">213</span>    CacheConfig cacheConfig = new CacheConfig(conf);<a name="line.213"></a>
+<span class="sourceLineNo">214</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.214"></a>
+<span class="sourceLineNo">215</span>    assertFalse(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.215"></a>
+<span class="sourceLineNo">216</span>    assertFalse(cacheConfig.shouldCacheDataCompressed());<a name="line.216"></a>
+<span class="sourceLineNo">217</span>    assertFalse(cacheConfig.shouldCacheDataOnWrite());<a name="line.217"></a>
+<span class="sourceLineNo">218</span>    assertTrue(cacheConfig.shouldCacheDataOnRead());<a name="line.218"></a>
+<span class="sourceLineNo">219</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.219"></a>
+<span class="sourceLineNo">220</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.220"></a>
+<span class="sourceLineNo">221</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.221"></a>
+<span class="sourceLineNo">222</span>    assertFalse(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.222"></a>
+<span class="sourceLineNo">223</span>    assertFalse(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.223"></a>
+<span class="sourceLineNo">224</span><a name="line.224"></a>
+<span class="sourceLineNo">225</span>    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, true);<a name="line.225"></a>
+<span class="sourceLineNo">226</span>    conf.setBoolean(CacheConfig.CACHE_DATA_BLOCKS_COMPRESSED_KEY, true);<a name="line.226"></a>
+<span class="sourceLineNo">227</span>    conf.setBoolean(CacheConfig.CACHE_BLOOM_BLOCKS_ON_WRITE_KEY, true);<a name="line.227"></a>
+<span class="sourceLineNo">228</span>    conf.setBoolean(CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY, true);<a name="line.228"></a>
+<span class="sourceLineNo">229</span><a name="line.229"></a>
+<span class="sourceLineNo">230</span>    cacheConfig = new CacheConfig(conf);<a name="line.230"></a>
+<span class="sourceLineNo">231</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.231"></a>
+<span class="sourceLineNo">232</span>    assertTrue(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.232"></a>
+<span class="sourceLineNo">233</span>    assertTrue(cacheConfig.shouldCacheDataCompressed());<a name="line.233"></a>
+<span class="sourceLineNo">234</span>    assertTrue(cacheConfig.shouldCacheDataOnWrite());<a name="line.234"></a>
+<span class="sourceLineNo">235</span>    assertTrue(cacheConfig.shouldCacheDataOnRead());<a name="line.235"></a>
+<span class="sourceLineNo">236</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.236"></a>
+<span class="sourceLineNo">237</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.237"></a>
+<span class="sourceLineNo">238</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.238"></a>
+<span class="sourceLineNo">239</span>    assertTrue(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.239"></a>
+<span class="sourceLineNo">240</span>    assertTrue(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.240"></a>
+<span class="sourceLineNo">241</span><a name="line.241"></a>
+<span class="sourceLineNo">242</span>    conf.setBoolean(CacheConfig.CACHE_DATA_ON_READ_KEY, false);<a name="line.242"></a>
+<span class="sourceLineNo">243</span>    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, false);<a name="line.243"></a>
+<span class="sourceLineNo">244</span><a name="line.244"></a>
+<span class="sourceLineNo">245</span>    cacheConfig = new CacheConfig(conf);<a name="line.245"></a>
+<span class="sourceLineNo">246</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.246"></a>
+<span class="sourceLineNo">247</span>    assertFalse(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.247"></a>
+<span class="sourceLineNo">248</span>    assertFalse(cacheConfig.shouldCacheDataCompressed());<a name="line.248"></a>
+<span class="sourceLineNo">249</span>    assertFalse(cacheConfig.shouldCacheDataOnWrite());<a name="line.249"></a>
+<span class="sourceLineNo">250</span>    assertFalse(cacheConfig.shouldCacheDataOnRead());<a name="line.250"></a>
+<span class="sourceLineNo">251</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.251"></a>
+<span class="sourceLineNo">252</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.252"></a>
+<span class="sourceLineNo">253</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.253"></a>
+<span class="sourceLineNo">254</span>    assertTrue(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.254"></a>
+<span class="sourceLineNo">255</span>    assertTrue(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.255"></a>
+<span class="sourceLineNo">256</span><a name="line.256"></a>
+<span class="sourceLineNo">257</span>    conf.setBoolean(CacheConfig.CACHE_DATA_ON_READ_KEY, true);<a name="line.257"></a>
+<span class="sourceLineNo">258</span>    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, false);<a name="line.258"></a>
+<span class="sourceLineNo">259</span><a name="line.259"></a>
+<span class="sourceLineNo">260</span>    HColumnDescriptor family = new HColumnDescriptor("testDisableCacheDataBlock");<a name="line.260"></a>
+<span class="sourceLineNo">261</span>    family.setBlockCacheEnabled(false);<a name="line.261"></a>
+<span class="sourceLineNo">262</span><a name="line.262"></a>
+<span class="sourceLineNo">263</span>    cacheConfig = new CacheConfig(conf, family);<a name="line.263"></a>
+<span class="sourceLineNo">264</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.264"></a>
+<span class="sourceLineNo">265</span>    assertFalse(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.265"></a>
+<span class="sourceLineNo">266</span>    assertFalse(cacheConfig.shouldCacheDataCompressed());<a name="line.266"></a>
+<span class="sourceLineNo">267</span>    assertFalse(cacheConfig.shouldCacheDataOnWrite());<a name="line.267"></a>
+<span class="sourceLineNo">268</span>    assertFalse(cacheConfig.shouldCacheDataOnRead());<a name="line.268"></a>
+<span class="sourceLineNo">269</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.269"></a>
+<span class="sourceLineNo">270</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.270"></a>
+<span class="sourceLineNo">271</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.271"></a>
+<span class="sourceLineNo">272</span>    assertTrue(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.272"></a>
+<span class="sourceLineNo">273</span>    assertTrue(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.273"></a>
+<span class="sourceLineNo">274</span>  }<a name="line.274"></a>
+<span class="sourceLineNo">275</span><a name="line.275"></a>
+<span class="sourceLineNo">276</span>  @Test<a name="line.276"></a>
+<span class="sourceLineNo">277</span>  public void testCacheConfigDefaultLRUBlockCache() {<a name="line.277"></a>
+<span class="sourceLineNo">278</span>    CacheConfig.instantiateBlockCache(this.conf);<a name="line.278"></a>
+<span class="sourceLineNo">279</span>    CacheConfig cc = new CacheConfig(this.conf);<a name="line.279"></a>
+<span class="sourceLineNo">280</span>    assertTrue(cc.isBlockCacheEnabled());<a name="line.280"></a>
+<span class="sourceLineNo">281</span>    assertTrue(CacheConfig.DEFAULT_IN_MEMORY == cc.isInMemory());<a name="line.281"></a>
+<span class="sourceLineNo">282</span>    basicBlockCacheOps(cc, false, true);<a name="line.282"></a>
+<span class="sourceLineNo">283</span>    assertTrue(cc.getBlockCache() instanceof LruBlockCache);<a name="line.283"></a>
+<span class="sourceLineNo">284</span>  }<a name="line.284"></a>
+<span class="sourceLineNo">285</span><a name="line.285"></a>
+<span class="sourceLineNo">286</span>  /**<a name="line.286"></a>
+<span class="sourceLineNo">287</span>   * Assert that the caches are deployed with CombinedBlockCache and of the appropriate sizes.<a name="line.287"></a>
+<span class="sourceLineNo">288</span>   */<a name="line.288"></a>
+<span class="sourceLineNo">289</span>  @Test<a name="line.289"></a>
+<span class="sourceLineNo">290</span>  public void testOffHeapBucketCacheConfig() {<a name="line.290"></a>
+<span class="sourceLineNo">291</span>    this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");<a name="line.291"></a>
+<span class="sourceLineNo">292</span>    doBucketCacheConfigTest();<a name="line.292"></a>
+<span class="sourceLineNo">293</span>  }<a name="line.293"></a>
+<span class="sourceLineNo">294</span><a name="line.294"></a>
+<span class="sourceLineNo">295</span>  @Test<a name="line.295"></a>
+<span class="sourceLineNo">296</span>  public void testFileBucketCacheConfig() throws IOException {<a name="line.296"></a>
+<span class="sourceLineNo">297</span>    HBaseTestingUtility htu = new HBaseTestingUtility(this.conf);<a name="line.297"></a>
+<span class="sourceLineNo">298</span>    try {<a name="line.298"></a>
+<span class="sourceLineNo">299</span>      Path p = new Path(htu.getDataTestDir(), "bc.txt");<a name="line.299"></a>
+<span class="sourceLineNo">300</span>      FileSystem fs = FileSystem.get(this.conf);<a name="line.300"></a>
+<span class="sourceLineNo">301</span>      fs.create(p).close();<a name="line.301"></a>
+<span class="sourceLineNo">302</span>      this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "file:" + p);<a name="line.302"></a>
+<span class="sourceLineNo">303</span>      doBucketCacheConfigTest();<a name="line.303"></a>
+<span class="sourceLineNo">304</span>    } finally {<a name="line.304"></a>
+<span class="sourceLineNo">305</span>      htu.cleanupTestDir();<a name="line.305"></a>
+<span class="sourceLineNo">306</span>    }<a name="line.306"></a>
+<span class="sourceLineNo">307</span>  }<a name="line.307"></a>
+<span class="sourceLineNo">308</span><a name="line.308"></a>
+<span class="sourceLineNo">309</span>  private void doBucketCacheConfigTest() {<a name="line.309"></a>
+<span class="sourceLineNo">310</span>    final int bcSize = 100;<a name="line.310"></a>
+<span class="sourceLineNo">311</span>    this.conf.setInt(HConstants.BUCKET_CACHE_SIZE_KEY, bcSize);<a name="line.311"></a>
+<span class="sourceLineNo">312</span>    CacheConfig.instantiateBlockCache(this.conf);<a name="line.312"></a>
+<span class="sourceLineNo">313</span>    CacheConfig cc = new CacheConfig(this.conf);<a name="line.313"></a>
+<span class="sourceLineNo">314</span>    basicBlockCacheOps(cc, false, false);<a name="line.314"></a>
+<span class="sourceLineNo">315</span>    assertTrue(cc.getBlockCache() instanceof CombinedBlockCache);<a name="line.315"></a>
+<span class="sourceLineNo">316</span>    // TODO: Assert sizes allocated are right and proportions.<a name="line.316"></a>
+<span class="sourceLineNo">317</span>    CombinedBlockCache cbc = (CombinedBlockCache)cc.getBlockCache();<a name="line.317"></a>
+<span class="sourceLineNo">318</span>    BlockCache [] bcs = cbc.getBlockCaches();<a name="line.318"></a>
+<span class="sourceLineNo">319</span>    assertTrue(bcs[0] instanceof LruBlockCache);<a name="line.319"></a>
+<span class="sourceLineNo">320</span>    LruBlockCache lbc = (LruBlockCache)bcs[0];<a name="line.320"></a>
+<span class="sourceLineNo">321</span>    assertEquals(MemorySizeUtil.getOnHeapCacheSize(this.conf), lbc.getMaxSize());<a name="line.321"></a>
+<span class="sourceLineNo">322</span>    assertTrue(bcs[1] instanceof BucketCache);<a name="line.322"></a>
+<span class="sourceLineNo">323</span>    BucketCache bc = (BucketCache)bcs[1];<a name="line.323"></a>
+<span class="sourceLineNo">324</span>    // getMaxSize comes back in bytes but we specified size in MB<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    assertEquals(bcSize, bc.getMaxSize() / (1024 * 1024));<a name="line.325"></a>
+<span class="sourceLineNo">326</span>  }<a name="line.326"></a>
+<span class="sourceLineNo">327</span><a name="line.327"></a>
+<span class="sourceLineNo">328</span>  /**<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * Assert that when BUCKET_CACHE_COMBINED_KEY is false, the non-default, that we deploy<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   * LruBlockCache as L1 with a BucketCache for L2.<a name="line.330"></a>
+<span class="sourceLineNo">331</span>   */<a name="line.331"></a>
+<span class="sourceLineNo">332</span>  @Test<a name="line.332"></a>
+<span class="sourceLineNo">333</span>  public void testBucketCacheConfigL1L2Setup() {<a name="line.333"></a>
+<span class="sourceLineNo">334</span>    this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");<a name="line.334"></a>
+<span class="sourceLineNo">335</span>    // Make lru size is smaller than bcSize for sure.  Need this to be true so when eviction<a name="line.335"></a>
+<span class="sourceLineNo">336</span>    // from L1 happens, it does not fail because L2 can't take the eviction because block too big.<a name="line.336"></a>
+<span class="sourceLineNo">337</span>    this.conf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.001f);<a name="line.337"></a>
+<span class="sourceLineNo">338</span>    MemoryUsage mu = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage();<a name="line.338"></a>
+<span class="sourceLineNo">339</span>    long lruExpectedSize = MemorySizeUtil.getOnHeapCacheSize(this.conf);<a name="line.339"></a>
+<span class="sourceLineNo">340</span>    final int bcSize = 100;<a name="line.340"></a>
+<span class="sourceLineNo">341</span>    long bcExpectedSize = 100 * 1024 * 1024; // MB.<a name="line.341"></a>
+<span class="sourceLineNo">342</span>    assertTrue(lruExpectedSize &lt; bcExpectedSize);<a name="line.342"></a>
+<span class="sourceLineNo">343</span>    this.conf.setInt(HConstants.BUCKET_CACHE_SIZE_KEY, bcSize);<a name="line.343"></a>
+<span class="sourceLineNo">344</span>    CacheConfig.instantiateBlockCache(this.conf);<a name="line.344"></a>
+<span class="sourceLineNo">345</span>    CacheConfig cc = new CacheConfig(this.conf);<a name="line.345"></a>
+<span class="sourceLineNo">346</span>    basicBlockCacheOps(cc, false, false);<a name="line.346"></a>
+<span class="sourceLineNo">347</span>    assertTrue(cc.getBlockCache() instanceof CombinedBlockCache);<a name="line.347"></a>
+<span class="sourceLineNo">348</span>    // TODO: Assert sizes allocated are right and proportions.<a name="line.348"></a>
+<span class="sourceLineNo">349</span>    CombinedBlockCache cbc = (CombinedBlockCache)cc.getBlockCache();<a name="line.349"></a>
+<span class="sourceLineNo">350</span>    LruBlockCache lbc = cbc.onHeapCache;<a name="line.350"></a>
+<span class="sourceLineNo">351</span>    assertEquals(lruExpectedSize, lbc.getMaxSize());<a name="line.351"></a>
+<span class="sourceLineNo">352</span>    BlockCache bc = cbc.l2Cache;<a name="line.352"></a>
+<span class="sourceLineNo">353</span>    // getMaxSize comes back in bytes but we specified size in MB<a name="line.353"></a>
+<span class="sourceLineNo">354</span>    assertEquals(bcExpectedSize, ((BucketCache) bc).getMaxSize());<a name="line.354"></a>
+<span class="sourceLineNo">355</span>    // Test the L1+L2 deploy works as we'd expect with blocks evicted from L1 going to L2.<a name="line.355"></a>
+<span class="sourceLineNo">356</span>    long initialL1BlockCount = lbc.getBlockCount();<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    long initialL2BlockCount = bc.getBlockCount();<a name="line.357"></a>
+<span class="sourceLineNo">358</span>    Cacheable c = new DataCacheEntry();<a name="line.358"></a>
+<span class="sourceLineNo">359</span>    BlockCacheKey bck = new BlockCacheKey("bck", 0);<a name="line.359"></a>
+<span class="sourceLineNo">360</span>    lbc.cacheBlock(bck, c, false);<a name="line.360"></a>
+<span class="sourceLineNo">361</span>    assertEquals(initialL1BlockCount + 1, lbc.getBlockCount());<a name="line.361"></a>
+<span class="sourceLineNo">362</span>    assertEquals(initialL2BlockCount, bc.getBlockCount());<a name="line.362"></a>
+<span class="sourceLineNo">363</span>    // Force evictions by putting in a block too big.<a name="line.363"></a>
+<span class="sourceLineNo">364</span>    final long justTooBigSize = lbc.acceptableSize() + 1;<a name="line.364"></a>
+<span class="sourceLineNo">365</span>    lbc.cacheBlock(new BlockCacheKey("bck2", 0), new DataCacheEntry() {<a name="line.365"></a>
+<span class="sourceLineNo">366</span>      @Override<a name="line.366"></a>
+<span class="sourceLineNo">367</span>      public long heapSize() {<a name="line.367"></a>
+<span class="sourceLineNo">368</span>        return justTooBigSize;<a name="line.368"></a>
+<span class="sourceLineNo">369</span>      }<a name="line.369"></a>
+<span class="sourceLineNo">370</span><a name="line.370"></a>
+<span class="sourceLineNo">371</span>      @Override<a name="line.371"></a>
+<span class="sourceLineNo">372</span>      public int getSerializedLength() {<a name="line.372"></a>
+<span class="sourceLineNo">373</span>        return (int)heapSize();<a name="line.373"></a>
+<span class="sourceLineNo">374</span>      }<a name="line.374"></a>
+<span class="sourceLineNo">375</span>    });<a name="line.375"></a>
+<span class="sourceLineNo">376</span>    // The eviction thread in lrublockcache needs to run.<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    while (initialL1BlockCount != lbc.getBlockCount()) Threads.sleep(10);<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    assertEquals(initialL1BlockCount, lbc.getBlockCount());<a name="line.378"></a>
+<span class="sourceLineNo">379</span>  }<a name="line.379"></a>
+<span class="sourceLineNo">380</span><a name="line.380"></a>
+<span class="sourceLineNo">381</span>  @Test<a name="line.381"></a>
+<span class="sourceLineNo">382</span>  public void testL2CacheWithInvalidBucketSize() {<a name="line.382"></a>
+<span class="sourceLineNo">383</span>    Configuration c = new Configuration(this.conf);<a name="line.383"></a>
+<span class="sourceLineNo">384</span>    c.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");<a name="line.384"></a>
+<span class="sourceLineNo">385</span>    c.set(CacheConfig.BUCKET_CACHE_BUCKETS_KEY, "256,512,1024,2048,4000,4096");<a name="line.385"></a>
+<span class="sourceLineNo">386</span>    c.setFloat(HConstants.BUCKET_CACHE_SIZE_KEY, 1024);<a name="line.386"></a>
+<span class="sourceLineNo">387</span>    try {<a name="line.387"></a>
+<span class="sourceLineNo">388</span>      CacheConfig.getBucketCache(c);<a name="line.388"></a>
+<span class="sourceLineNo">389</span>      fail("Should throw IllegalArgumentException when passing illegal value for bucket size");<a name="line.389"></a>
+<span class="sourceLineNo">390</span>    } catch (IllegalArgumentException e) {<a name="line.390"></a>
+<span class="sourceLineNo">391</span>    }<a name="line.391"></a>
+<span class="sourceLineNo">392</span>  }<a name="line.392"></a>
+<span class="sourceLineNo">393</span>}<a name="line.393"></a>
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.Deserializer.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.Deserializer.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.Deserializer.html
index f8e34f2..ea22449 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.Deserializer.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.Deserializer.html
@@ -217,184 +217,188 @@
 <span class="sourceLineNo">209</span>  @Test<a name="line.209"></a>
 <span class="sourceLineNo">210</span>  public void testDisableCacheDataBlock() throws IOException {<a name="line.210"></a>
 <span class="sourceLineNo">211</span>    Configuration conf = HBaseConfiguration.create();<a name="line.211"></a>
-<span class="sourceLineNo">212</span>    CacheConfig cacheConfig = new CacheConfig(conf);<a name="line.212"></a>
-<span class="sourceLineNo">213</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.213"></a>
-<span class="sourceLineNo">214</span>    assertFalse(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.214"></a>
-<span class="sourceLineNo">215</span>    assertFalse(cacheConfig.shouldCacheDataCompressed());<a name="line.215"></a>
-<span class="sourceLineNo">216</span>    assertFalse(cacheConfig.shouldCacheDataOnWrite());<a name="line.216"></a>
-<span class="sourceLineNo">217</span>    assertTrue(cacheConfig.shouldCacheDataOnRead());<a name="line.217"></a>
-<span class="sourceLineNo">218</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.218"></a>
-<span class="sourceLineNo">219</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.219"></a>
-<span class="sourceLineNo">220</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.220"></a>
-<span class="sourceLineNo">221</span>    assertFalse(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.221"></a>
-<span class="sourceLineNo">222</span>    assertFalse(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.222"></a>
-<span class="sourceLineNo">223</span><a name="line.223"></a>
-<span class="sourceLineNo">224</span>    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, true);<a name="line.224"></a>
-<span class="sourceLineNo">225</span>    conf.setBoolean(CacheConfig.CACHE_DATA_BLOCKS_COMPRESSED_KEY, true);<a name="line.225"></a>
-<span class="sourceLineNo">226</span>    conf.setBoolean(CacheConfig.CACHE_BLOOM_BLOCKS_ON_WRITE_KEY, true);<a name="line.226"></a>
-<span class="sourceLineNo">227</span>    conf.setBoolean(CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY, true);<a name="line.227"></a>
-<span class="sourceLineNo">228</span><a name="line.228"></a>
-<span class="sourceLineNo">229</span>    cacheConfig = new CacheConfig(conf);<a name="line.229"></a>
-<span class="sourceLineNo">230</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.230"></a>
-<span class="sourceLineNo">231</span>    assertTrue(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.231"></a>
-<span class="sourceLineNo">232</span>    assertTrue(cacheConfig.shouldCacheDataCompressed());<a name="line.232"></a>
-<span class="sourceLineNo">233</span>    assertTrue(cacheConfig.shouldCacheDataOnWrite());<a name="line.233"></a>
-<span class="sourceLineNo">234</span>    assertTrue(cacheConfig.shouldCacheDataOnRead());<a name="line.234"></a>
-<span class="sourceLineNo">235</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.235"></a>
-<span class="sourceLineNo">236</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.236"></a>
-<span class="sourceLineNo">237</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.237"></a>
-<span class="sourceLineNo">238</span>    assertTrue(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.238"></a>
-<span class="sourceLineNo">239</span>    assertTrue(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.239"></a>
-<span class="sourceLineNo">240</span><a name="line.240"></a>
-<span class="sourceLineNo">241</span>    conf.setBoolean(CacheConfig.CACHE_DATA_ON_READ_KEY, false);<a name="line.241"></a>
-<span class="sourceLineNo">242</span>    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, false);<a name="line.242"></a>
-<span class="sourceLineNo">243</span><a name="line.243"></a>
-<span class="sourceLineNo">244</span>    cacheConfig = new CacheConfig(conf);<a name="line.244"></a>
-<span class="sourceLineNo">245</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.245"></a>
-<span class="sourceLineNo">246</span>    assertFalse(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.246"></a>
-<span class="sourceLineNo">247</span>    assertFalse(cacheConfig.shouldCacheDataCompressed());<a name="line.247"></a>
-<span class="sourceLineNo">248</span>    assertFalse(cacheConfig.shouldCacheDataOnWrite());<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    assertFalse(cacheConfig.shouldCacheDataOnRead());<a name="line.249"></a>
-<span class="sourceLineNo">250</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.250"></a>
-<span class="sourceLineNo">251</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.251"></a>
-<span class="sourceLineNo">252</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.252"></a>
-<span class="sourceLineNo">253</span>    assertTrue(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    assertTrue(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.254"></a>
-<span class="sourceLineNo">255</span><a name="line.255"></a>
-<span class="sourceLineNo">256</span>    conf.setBoolean(CacheConfig.CACHE_DATA_ON_READ_KEY, true);<a name="line.256"></a>
-<span class="sourceLineNo">257</span>    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, false);<a name="line.257"></a>
-<span class="sourceLineNo">258</span><a name="line.258"></a>
-<span class="sourceLineNo">259</span>    HColumnDescriptor family = new HColumnDescriptor("testDisableCacheDataBlock");<a name="line.259"></a>
-<span class="sourceLineNo">260</span>    family.setBlockCacheEnabled(false);<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>    cacheConfig = new CacheConfig(conf, family);<a name="line.262"></a>
-<span class="sourceLineNo">263</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    assertFalse(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.264"></a>
-<span class="sourceLineNo">265</span>    assertFalse(cacheConfig.shouldCacheDataCompressed());<a name="line.265"></a>
-<span class="sourceLineNo">266</span>    assertFalse(cacheConfig.shouldCacheDataOnWrite());<a name="line.266"></a>
-<span class="sourceLineNo">267</span>    assertFalse(cacheConfig.shouldCacheDataOnRead());<a name="line.267"></a>
-<span class="sourceLineNo">268</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.269"></a>
-<span class="sourceLineNo">270</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.270"></a>
-<span class="sourceLineNo">271</span>    assertTrue(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.271"></a>
-<span class="sourceLineNo">272</span>    assertTrue(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.272"></a>
-<span class="sourceLineNo">273</span>  }<a name="line.273"></a>
-<span class="sourceLineNo">274</span><a name="line.274"></a>
-<span class="sourceLineNo">275</span>  @Test<a name="line.275"></a>
-<span class="sourceLineNo">276</span>  public void testCacheConfigDefaultLRUBlockCache() {<a name="line.276"></a>
-<span class="sourceLineNo">277</span>    CacheConfig cc = new CacheConfig(this.conf);<a name="line.277"></a>
-<span class="sourceLineNo">278</span>    assertTrue(cc.isBlockCacheEnabled());<a name="line.278"></a>
-<span class="sourceLineNo">279</span>    assertTrue(CacheConfig.DEFAULT_IN_MEMORY == cc.isInMemory());<a name="line.279"></a>
-<span class="sourceLineNo">280</span>    basicBlockCacheOps(cc, false, true);<a name="line.280"></a>
-<span class="sourceLineNo">281</span>    assertTrue(cc.getBlockCache() instanceof LruBlockCache);<a name="line.281"></a>
-<span class="sourceLineNo">282</span>  }<a name="line.282"></a>
-<span class="sourceLineNo">283</span><a name="line.283"></a>
-<span class="sourceLineNo">284</span>  /**<a name="line.284"></a>
-<span class="sourceLineNo">285</span>   * Assert that the caches are deployed with CombinedBlockCache and of the appropriate sizes.<a name="line.285"></a>
-<span class="sourceLineNo">286</span>   */<a name="line.286"></a>
-<span class="sourceLineNo">287</span>  @Test<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  public void testOffHeapBucketCacheConfig() {<a name="line.288"></a>
-<span class="sourceLineNo">289</span>    this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");<a name="line.289"></a>
-<span class="sourceLineNo">290</span>    doBucketCacheConfigTest();<a name="line.290"></a>
-<span class="sourceLineNo">291</span>  }<a name="line.291"></a>
-<span class="sourceLineNo">292</span><a name="line.292"></a>
-<span class="sourceLineNo">293</span>  @Test<a name="line.293"></a>
-<span class="sourceLineNo">294</span>  public void testFileBucketCacheConfig() throws IOException {<a name="line.294"></a>
-<span class="sourceLineNo">295</span>    HBaseTestingUtility htu = new HBaseTestingUtility(this.conf);<a name="line.295"></a>
-<span class="sourceLineNo">296</span>    try {<a name="line.296"></a>
-<span class="sourceLineNo">297</span>      Path p = new Path(htu.getDataTestDir(), "bc.txt");<a name="line.297"></a>
-<span class="sourceLineNo">298</span>      FileSystem fs = FileSystem.get(this.conf);<a name="line.298"></a>
-<span class="sourceLineNo">299</span>      fs.create(p).close();<a name="line.299"></a>
-<span class="sourceLineNo">300</span>      this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "file:" + p);<a name="line.300"></a>
-<span class="sourceLineNo">301</span>      doBucketCacheConfigTest();<a name="line.301"></a>
-<span class="sourceLineNo">302</span>    } finally {<a name="line.302"></a>
-<span class="sourceLineNo">303</span>      htu.cleanupTestDir();<a name="line.303"></a>
-<span class="sourceLineNo">304</span>    }<a name="line.304"></a>
-<span class="sourceLineNo">305</span>  }<a name="line.305"></a>
-<span class="sourceLineNo">306</span><a name="line.306"></a>
-<span class="sourceLineNo">307</span>  private void doBucketCacheConfigTest() {<a name="line.307"></a>
-<span class="sourceLineNo">308</span>    final int bcSize = 100;<a name="line.308"></a>
-<span class="sourceLineNo">309</span>    this.conf.setInt(HConstants.BUCKET_CACHE_SIZE_KEY, bcSize);<a name="line.309"></a>
-<span class="sourceLineNo">310</span>    CacheConfig cc = new CacheConfig(this.conf);<a name="line.310"></a>
-<span class="sourceLineNo">311</span>    basicBlockCacheOps(cc, false, false);<a name="line.311"></a>
-<span class="sourceLineNo">312</span>    assertTrue(cc.getBlockCache() instanceof CombinedBlockCache);<a name="line.312"></a>
-<span class="sourceLineNo">313</span>    // TODO: Assert sizes allocated are right and proportions.<a name="line.313"></a>
-<span class="sourceLineNo">314</span>    CombinedBlockCache cbc = (CombinedBlockCache)cc.getBlockCache();<a name="line.314"></a>
-<span class="sourceLineNo">315</span>    BlockCache [] bcs = cbc.getBlockCaches();<a name="line.315"></a>
-<span class="sourceLineNo">316</span>    assertTrue(bcs[0] instanceof LruBlockCache);<a name="line.316"></a>
-<span class="sourceLineNo">317</span>    LruBlockCache lbc = (LruBlockCache)bcs[0];<a name="line.317"></a>
-<span class="sourceLineNo">318</span>    assertEquals(MemorySizeUtil.getOnHeapCacheSize(this.conf), lbc.getMaxSize());<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    assertTrue(bcs[1] instanceof BucketCache);<a name="line.319"></a>
-<span class="sourceLineNo">320</span>    BucketCache bc = (BucketCache)bcs[1];<a name="line.320"></a>
-<span class="sourceLineNo">321</span>    // getMaxSize comes back in bytes but we specified size in MB<a name="line.321"></a>
-<span class="sourceLineNo">322</span>    assertEquals(bcSize, bc.getMaxSize() / (1024 * 1024));<a name="line.322"></a>
-<span class="sourceLineNo">323</span>  }<a name="line.323"></a>
-<span class="sourceLineNo">324</span><a name="line.324"></a>
-<span class="sourceLineNo">325</span>  /**<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   * Assert that when BUCKET_CACHE_COMBINED_KEY is false, the non-default, that we deploy<a name="line.326"></a>
-<span class="sourceLineNo">327</span>   * LruBlockCache as L1 with a BucketCache for L2.<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   */<a name="line.328"></a>
-<span class="sourceLineNo">329</span>  @Test<a name="line.329"></a>
-<span class="sourceLineNo">330</span>  public void testBucketCacheConfigL1L2Setup() {<a name="line.330"></a>
-<span class="sourceLineNo">331</span>    this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");<a name="line.331"></a>
-<span class="sourceLineNo">332</span>    // Make lru size is smaller than bcSize for sure.  Need this to be true so when eviction<a name="line.332"></a>
-<span class="sourceLineNo">333</span>    // from L1 happens, it does not fail because L2 can't take the eviction because block too big.<a name="line.333"></a>
-<span class="sourceLineNo">334</span>    this.conf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.001f);<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    MemoryUsage mu = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage();<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    long lruExpectedSize = MemorySizeUtil.getOnHeapCacheSize(this.conf);<a name="line.336"></a>
-<span class="sourceLineNo">337</span>    final int bcSize = 100;<a name="line.337"></a>
-<span class="sourceLineNo">338</span>    long bcExpectedSize = 100 * 1024 * 1024; // MB.<a name="line.338"></a>
-<span class="sourceLineNo">339</span>    assertTrue(lruExpectedSize &lt; bcExpectedSize);<a name="line.339"></a>
-<span class="sourceLineNo">340</span>    this.conf.setInt(HConstants.BUCKET_CACHE_SIZE_KEY, bcSize);<a name="line.340"></a>
-<span class="sourceLineNo">341</span>    CacheConfig cc = new CacheConfig(this.conf);<a name="line.341"></a>
-<span class="sourceLineNo">342</span>    basicBlockCacheOps(cc, false, false);<a name="line.342"></a>
-<span class="sourceLineNo">343</span>    assertTrue(cc.getBlockCache() instanceof CombinedBlockCache);<a name="line.343"></a>
-<span class="sourceLineNo">344</span>    // TODO: Assert sizes allocated are right and proportions.<a name="line.344"></a>
-<span class="sourceLineNo">345</span>    CombinedBlockCache cbc = (CombinedBlockCache)cc.getBlockCache();<a name="line.345"></a>
-<span class="sourceLineNo">346</span>    LruBlockCache lbc = cbc.onHeapCache;<a name="line.346"></a>
-<span class="sourceLineNo">347</span>    assertEquals(lruExpectedSize, lbc.getMaxSize());<a name="line.347"></a>
-<span class="sourceLineNo">348</span>    BlockCache bc = cbc.l2Cache;<a name="line.348"></a>
-<span class="sourceLineNo">349</span>    // getMaxSize comes back in bytes but we specified size in MB<a name="line.349"></a>
-<span class="sourceLineNo">350</span>    assertEquals(bcExpectedSize, ((BucketCache) bc).getMaxSize());<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    // Test the L1+L2 deploy works as we'd expect with blocks evicted from L1 going to L2.<a name="line.351"></a>
-<span class="sourceLineNo">352</span>    long initialL1BlockCount = lbc.getBlockCount();<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    long initialL2BlockCount = bc.getBlockCount();<a name="line.353"></a>
-<span class="sourceLineNo">354</span>    Cacheable c = new DataCacheEntry();<a name="line.354"></a>
-<span class="sourceLineNo">355</span>    BlockCacheKey bck = new BlockCacheKey("bck", 0);<a name="line.355"></a>
-<span class="sourceLineNo">356</span>    lbc.cacheBlock(bck, c, false);<a name="line.356"></a>
-<span class="sourceLineNo">357</span>    assertEquals(initialL1BlockCount + 1, lbc.getBlockCount());<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    assertEquals(initialL2BlockCount, bc.getBlockCount());<a name="line.358"></a>
-<span class="sourceLineNo">359</span>    // Force evictions by putting in a block too big.<a name="line.359"></a>
-<span class="sourceLineNo">360</span>    final long justTooBigSize = lbc.acceptableSize() + 1;<a name="line.360"></a>
-<span class="sourceLineNo">361</span>    lbc.cacheBlock(new BlockCacheKey("bck2", 0), new DataCacheEntry() {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>      @Override<a name="line.362"></a>
-<span class="sourceLineNo">363</span>      public long heapSize() {<a name="line.363"></a>
-<span class="sourceLineNo">364</span>        return justTooBigSize;<a name="line.364"></a>
-<span class="sourceLineNo">365</span>      }<a name="line.365"></a>
-<span class="sourceLineNo">366</span><a name="line.366"></a>
-<span class="sourceLineNo">367</span>      @Override<a name="line.367"></a>
-<span class="sourceLineNo">368</span>      public int getSerializedLength() {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>        return (int)heapSize();<a name="line.369"></a>
-<span class="sourceLineNo">370</span>      }<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    });<a name="line.371"></a>
-<span class="sourceLineNo">372</span>    // The eviction thread in lrublockcache needs to run.<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    while (initialL1BlockCount != lbc.getBlockCount()) Threads.sleep(10);<a name="line.373"></a>
-<span class="sourceLineNo">374</span>    assertEquals(initialL1BlockCount, lbc.getBlockCount());<a name="line.374"></a>
-<span class="sourceLineNo">375</span>  }<a name="line.375"></a>
-<span class="sourceLineNo">376</span><a name="line.376"></a>
-<span class="sourceLineNo">377</span>  @Test<a name="line.377"></a>
-<span class="sourceLineNo">378</span>  public void testL2CacheWithInvalidBucketSize() {<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    Configuration c = new Configuration(this.conf);<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    c.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    c.set(CacheConfig.BUCKET_CACHE_BUCKETS_KEY, "256,512,1024,2048,4000,4096");<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    c.setFloat(HConstants.BUCKET_CACHE_SIZE_KEY, 1024);<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    try {<a name="line.383"></a>
-<span class="sourceLineNo">384</span>      CacheConfig.getBucketCache(c);<a name="line.384"></a>
-<span class="sourceLineNo">385</span>      fail("Should throw IllegalArgumentException when passing illegal value for bucket size");<a name="line.385"></a>
-<span class="sourceLineNo">386</span>    } catch (IllegalArgumentException e) {<a name="line.386"></a>
-<span class="sourceLineNo">387</span>    }<a name="line.387"></a>
-<span class="sourceLineNo">388</span>  }<a name="line.388"></a>
-<span class="sourceLineNo">389</span>}<a name="line.389"></a>
+<span class="sourceLineNo">212</span>    CacheConfig.instantiateBlockCache(conf);<a name="line.212"></a>
+<span class="sourceLineNo">213</span>    CacheConfig cacheConfig = new CacheConfig(conf);<a name="line.213"></a>
+<span class="sourceLineNo">214</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.214"></a>
+<span class="sourceLineNo">215</span>    assertFalse(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.215"></a>
+<span class="sourceLineNo">216</span>    assertFalse(cacheConfig.shouldCacheDataCompressed());<a name="line.216"></a>
+<span class="sourceLineNo">217</span>    assertFalse(cacheConfig.shouldCacheDataOnWrite());<a name="line.217"></a>
+<span class="sourceLineNo">218</span>    assertTrue(cacheConfig.shouldCacheDataOnRead());<a name="line.218"></a>
+<span class="sourceLineNo">219</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.219"></a>
+<span class="sourceLineNo">220</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.220"></a>
+<span class="sourceLineNo">221</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.221"></a>
+<span class="sourceLineNo">222</span>    assertFalse(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.222"></a>
+<span class="sourceLineNo">223</span>    assertFalse(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.223"></a>
+<span class="sourceLineNo">224</span><a name="line.224"></a>
+<span class="sourceLineNo">225</span>    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, true);<a name="line.225"></a>
+<span class="sourceLineNo">226</span>    conf.setBoolean(CacheConfig.CACHE_DATA_BLOCKS_COMPRESSED_KEY, true);<a name="line.226"></a>
+<span class="sourceLineNo">227</span>    conf.setBoolean(CacheConfig.CACHE_BLOOM_BLOCKS_ON_WRITE_KEY, true);<a name="line.227"></a>
+<span class="sourceLineNo">228</span>    conf.setBoolean(CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY, true);<a name="line.228"></a>
+<span class="sourceLineNo">229</span><a name="line.229"></a>
+<span class="sourceLineNo">230</span>    cacheConfig = new CacheConfig(conf);<a name="line.230"></a>
+<span class="sourceLineNo">231</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.231"></a>
+<span class="sourceLineNo">232</span>    assertTrue(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.232"></a>
+<span class="sourceLineNo">233</span>    assertTrue(cacheConfig.shouldCacheDataCompressed());<a name="line.233"></a>
+<span class="sourceLineNo">234</span>    assertTrue(cacheConfig.shouldCacheDataOnWrite());<a name="line.234"></a>
+<span class="sourceLineNo">235</span>    assertTrue(cacheConfig.shouldCacheDataOnRead());<a name="line.235"></a>
+<span class="sourceLineNo">236</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.236"></a>
+<span class="sourceLineNo">237</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.237"></a>
+<span class="sourceLineNo">238</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.238"></a>
+<span class="sourceLineNo">239</span>    assertTrue(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.239"></a>
+<span class="sourceLineNo">240</span>    assertTrue(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.240"></a>
+<span class="sourceLineNo">241</span><a name="line.241"></a>
+<span class="sourceLineNo">242</span>    conf.setBoolean(CacheConfig.CACHE_DATA_ON_READ_KEY, false);<a name="line.242"></a>
+<span class="sourceLineNo">243</span>    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, false);<a name="line.243"></a>
+<span class="sourceLineNo">244</span><a name="line.244"></a>
+<span class="sourceLineNo">245</span>    cacheConfig = new CacheConfig(conf);<a name="line.245"></a>
+<span class="sourceLineNo">246</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.246"></a>
+<span class="sourceLineNo">247</span>    assertFalse(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.247"></a>
+<span class="sourceLineNo">248</span>    assertFalse(cacheConfig.shouldCacheDataCompressed());<a name="line.248"></a>
+<span class="sourceLineNo">249</span>    assertFalse(cacheConfig.shouldCacheDataOnWrite());<a name="line.249"></a>
+<span class="sourceLineNo">250</span>    assertFalse(cacheConfig.shouldCacheDataOnRead());<a name="line.250"></a>
+<span class="sourceLineNo">251</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.251"></a>
+<span class="sourceLineNo">252</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.252"></a>
+<span class="sourceLineNo">253</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.253"></a>
+<span class="sourceLineNo">254</span>    assertTrue(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.254"></a>
+<span class="sourceLineNo">255</span>    assertTrue(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.255"></a>
+<span class="sourceLineNo">256</span><a name="line.256"></a>
+<span class="sourceLineNo">257</span>    conf.setBoolean(CacheConfig.CACHE_DATA_ON_READ_KEY, true);<a name="line.257"></a>
+<span class="sourceLineNo">258</span>    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, false);<a name="line.258"></a>
+<span class="sourceLineNo">259</span><a name="line.259"></a>
+<span class="sourceLineNo">260</span>    HColumnDescriptor family = new HColumnDescriptor("testDisableCacheDataBlock");<a name="line.260"></a>
+<span class="sourceLineNo">261</span>    family.setBlockCacheEnabled(false);<a name="line.261"></a>
+<span class="sourceLineNo">262</span><a name="line.262"></a>
+<span class="sourceLineNo">263</span>    cacheConfig = new CacheConfig(conf, family);<a name="line.263"></a>
+<span class="sourceLineNo">264</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.264"></a>
+<span class="sourceLineNo">265</span>    assertFalse(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.265"></a>
+<span class="sourceLineNo">266</span>    assertFalse(cacheConfig.shouldCacheDataCompressed());<a name="line.266"></a>
+<span class="sourceLineNo">267</span>    assertFalse(cacheConfig.shouldCacheDataOnWrite());<a name="line.267"></a>
+<span class="sourceLineNo">268</span>    assertFalse(cacheConfig.shouldCacheDataOnRead());<a name="line.268"></a>
+<span class="sourceLineNo">269</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.269"></a>
+<span class="sourceLineNo">270</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.270"></a>
+<span class="sourceLineNo">271</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.271"></a>
+<span class="sourceLineNo">272</span>    assertTrue(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.272"></a>
+<span class="sourceLineNo">273</span>    assertTrue(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.273"></a>
+<span class="sourceLineNo">274</span>  }<a name="line.274"></a>
+<span class="sourceLineNo">275</span><a name="line.275"></a>
+<span class="sourceLineNo">276</span>  @Test<a name="line.276"></a>
+<span class="sourceLineNo">277</span>  public void testCacheConfigDefaultLRUBlockCache() {<a name="line.277"></a>
+<span class="sourceLineNo">278</span>    CacheConfig.instantiateBlockCache(this.conf);<a name="line.278"></a>
+<span class="sourceLineNo">279</span>    CacheConfig cc = new CacheConfig(this.conf);<a name="line.279"></a>
+<span class="sourceLineNo">280</span>    assertTrue(cc.isBlockCacheEnabled());<a name="line.280"></a>
+<span class="sourceLineNo">281</span>    assertTrue(CacheConfig.DEFAULT_IN_MEMORY == cc.isInMemory());<a name="line.281"></a>
+<span class="sourceLineNo">282</span>    basicBlockCacheOps(cc, false, true);<a name="line.282"></a>
+<span class="sourceLineNo">283</span>    assertTrue(cc.getBlockCache() instanceof LruBlockCache);<a name="line.283"></a>
+<span class="sourceLineNo">284</span>  }<a name="line.284"></a>
+<span class="sourceLineNo">285</span><a name="line.285"></a>
+<span class="sourceLineNo">286</span>  /**<a name="line.286"></a>
+<span class="sourceLineNo">287</span>   * Assert that the caches are deployed with CombinedBlockCache and of the appropriate sizes.<a name="line.287"></a>
+<span class="sourceLineNo">288</span>   */<a name="line.288"></a>
+<span class="sourceLineNo">289</span>  @Test<a name="line.289"></a>
+<span class="sourceLineNo">290</span>  public void testOffHeapBucketCacheConfig() {<a name="line.290"></a>
+<span class="sourceLineNo">291</span>    this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");<a name="line.291"></a>
+<span class="sourceLineNo">292</span>    doBucketCacheConfigTest();<a name="line.292"></a>
+<span class="sourceLineNo">293</span>  }<a name="line.293"></a>
+<span class="sourceLineNo">294</span><a name="line.294"></a>
+<span class="sourceLineNo">295</span>  @Test<a name="line.295"></a>
+<span class="sourceLineNo">296</span>  public void testFileBucketCacheConfig() throws IOException {<a name="line.296"></a>
+<span class="sourceLineNo">297</span>    HBaseTestingUtility htu = new HBaseTestingUtility(this.conf);<a name="line.297"></a>
+<span class="sourceLineNo">298</span>    try {<a name="line.298"></a>
+<span class="sourceLineNo">299</span>      Path p = new Path(htu.getDataTestDir(), "bc.txt");<a name="line.299"></a>
+<span class="sourceLineNo">300</span>      FileSystem fs = FileSystem.get(this.conf);<a name="line.300"></a>
+<span class="sourceLineNo">301</span>      fs.create(p).close();<a name="line.301"></a>
+<span class="sourceLineNo">302</span>      this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "file:" + p);<a name="line.302"></a>
+<span class="sourceLineNo">303</span>      doBucketCacheConfigTest();<a name="line.303"></a>
+<span class="sourceLineNo">304</span>    } finally {<a name="line.304"></a>
+<span class="sourceLineNo">305</span>      htu.cleanupTestDir();<a name="line.305"></a>
+<span class="sourceLineNo">306</span>    }<a name="line.306"></a>
+<span class="sourceLineNo">307</span>  }<a name="line.307"></a>
+<span class="sourceLineNo">308</span><a name="line.308"></a>
+<span class="sourceLineNo">309</span>  private void doBucketCacheConfigTest() {<a name="line.309"></a>
+<span class="sourceLineNo">310</span>    final int bcSize = 100;<a name="line.310"></a>
+<span class="sourceLineNo">311</span>    this.conf.setInt(HConstants.BUCKET_CACHE_SIZE_KEY, bcSize);<a name="line.311"></a>
+<span class="sourceLineNo">312</span>    CacheConfig.instantiateBlockCache(this.conf);<a name="line.312"></a>
+<span class="sourceLineNo">313</span>    CacheConfig cc = new CacheConfig(this.conf);<a name="line.313"></a>
+<span class="sourceLineNo">314</span>    basicBlockCacheOps(cc, false, false);<a name="line.314"></a>
+<span class="sourceLineNo">315</span>    assertTrue(cc.getBlockCache() instanceof CombinedBlockCache);<a name="line.315"></a>
+<span class="sourceLineNo">316</span>    // TODO: Assert sizes allocated are right and proportions.<a name="line.316"></a>
+<span class="sourceLineNo">317</span>    CombinedBlockCache cbc = (CombinedBlockCache)cc.getBlockCache();<a name="line.317"></a>
+<span class="sourceLineNo">318</span>    BlockCache [] bcs = cbc.getBlockCaches();<a name="line.318"></a>
+<span class="sourceLineNo">319</span>    assertTrue(bcs[0] instanceof LruBlockCache);<a name="line.319"></a>
+<span class="sourceLineNo">320</span>    LruBlockCache lbc = (LruBlockCache)bcs[0];<a name="line.320"></a>
+<span class="sourceLineNo">321</span>    assertEquals(MemorySizeUtil.getOnHeapCacheSize(this.conf), lbc.getMaxSize());<a name="line.321"></a>
+<span class="sourceLineNo">322</span>    assertTrue(bcs[1] instanceof BucketCache);<a name="line.322"></a>
+<span class="sourceLineNo">323</span>    BucketCache bc = (BucketCache)bcs[1];<a name="line.323"></a>
+<span class="sourceLineNo">324</span>    // getMaxSize comes back in bytes but we specified size in MB<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    assertEquals(bcSize, bc.getMaxSize() / (1024 * 1024));<a name="line.325"></a>
+<span class="sourceLineNo">326</span>  }<a name="line.326"></a>
+<span class="sourceLineNo">327</span><a name="line.327"></a>
+<span class="sourceLineNo">328</span>  /**<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * Assert that when BUCKET_CACHE_COMBINED_KEY is false, the non-default, that we deploy<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   * LruBlockCache as L1 with a BucketCache for L2.<a name="line.330"></a>
+<span class="sourceLineNo">331</span>   */<a name="line.331"></a>
+<span class="sourceLineNo">332</span>  @Test<a name="line.332"></a>
+<span class="sourceLineNo">333</span>  public void testBucketCacheConfigL1L2Setup() {<a name="line.333"></a>
+<span class="sourceLineNo">334</span>    this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");<a name="line.334"></a>
+<span class="sourceLineNo">335</span>    // Make lru size is smaller than bcSize for sure.  Need this to be true so when eviction<a name="line.335"></a>
+<span class="sourceLineNo">336</span>    // from L1 happens, it does not fail because L2 can't take the eviction because block too big.<a name="line.336"></a>
+<span class="sourceLineNo">337</span>    this.conf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.001f);<a name="line.337"></a>
+<span class="sourceLineNo">338</span>    MemoryUsage mu = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage();<a name="line.338"></a>
+<span class="sourceLineNo">339</span>    long lruExpectedSize = MemorySizeUtil.getOnHeapCacheSize(this.conf);<a name="line.339"></a>
+<span class="sourceLineNo">340</span>    final int bcSize = 100;<a name="line.340"></a>
+<span class="sourceLineNo">341</span>    long bcExpectedSize = 100 * 1024 * 1024; // MB.<a name="line.341"></a>
+<span class="sourceLineNo">342</span>    assertTrue(lruExpectedSize &lt; bcExpectedSize);<a name="line.342"></a>
+<span class="sourceLineNo">343</span>    this.conf.setInt(HConstants.BUCKET_CACHE_SIZE_KEY, bcSize);<a name="line.343"></a>
+<span class="sourceLineNo">344</span>    CacheConfig.instantiateBlockCache(this.conf);<a name="line.344"></a>
+<span class="sourceLineNo">345</span>    CacheConfig cc = new CacheConfig(this.conf);<a name="line.345"></a>
+<span class="sourceLineNo">346</span>    basicBlockCacheOps(cc, false, false);<a name="line.346"></a>
+<span class="sourceLineNo">347</span>    assertTrue(cc.getBlockCache() instanceof CombinedBlockCache);<a name="line.347"></a>
+<span class="sourceLineNo">348</span>    // TODO: Assert sizes allocated are right and proportions.<a name="line.348"></a>
+<span class="sourceLineNo">349</span>    CombinedBlockCache cbc = (CombinedBlockCache)cc.getBlockCache();<a name="line.349"></a>
+<span class="sourceLineNo">350</span>    LruBlockCache lbc = cbc.onHeapCache;<a name="line.350"></a>
+<span class="sourceLineNo">351</span>    assertEquals(lruExpectedSize, lbc.getMaxSize());<a name="line.351"></a>
+<span class="sourceLineNo">352</span>    BlockCache bc = cbc.l2Cache;<a name="line.352"></a>
+<span class="sourceLineNo">353</span>    // getMaxSize comes back in bytes but we specified size in MB<a name="line.353"></a>
+<span class="sourceLineNo">354</span>    assertEquals(bcExpectedSize, ((BucketCache) bc).getMaxSize());<a name="line.354"></a>
+<span class="sourceLineNo">355</span>    // Test the L1+L2 deploy works as we'd expect with blocks evicted from L1 going to L2.<a name="line.355"></a>
+<span class="sourceLineNo">356</span>    long initialL1BlockCount = lbc.getBlockCount();<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    long initialL2BlockCount = bc.getBlockCount();<a name="line.357"></a>
+<span class="sourceLineNo">358</span>    Cacheable c = new DataCacheEntry();<a name="line.358"></a>
+<span class="sourceLineNo">359</span>    BlockCacheKey bck = new BlockCacheKey("bck", 0);<a name="line.359"></a>
+<span class="sourceLineNo">360</span>    lbc.cacheBlock(bck, c, false);<a name="line.360"></a>
+<span class="sourceLineNo">361</span>    assertEquals(initialL1BlockCount + 1, lbc.getBlockCount());<a name="line.361"></a>
+<span class="sourceLineNo">362</span>    assertEquals(initialL2BlockCount, bc.getBlockCount());<a name="line.362"></a>
+<span class="sourceLineNo">363</span>    // Force evictions by putting in a block too big.<a name="line.363"></a>
+<span class="sourceLineNo">364</span>    final long justTooBigSize = lbc.acceptableSize() + 1;<a name="line.364"></a>
+<span class="sourceLineNo">365</span>    lbc.cacheBlock(new BlockCacheKey("bck2", 0), new DataCacheEntry() {<a name="line.365"></a>
+<span class="sourceLineNo">366</span>      @Override<a name="line.366"></a>
+<span class="sourceLineNo">367</span>      public long heapSize() {<a name="line.367"></a>
+<span class="sourceLineNo">368</span>        return justTooBigSize;<a name="line.368"></a>
+<span class="sourceLineNo">369</span>      }<a name="line.369"></a>
+<span class="sourceLineNo">370</span><a name="line.370"></a>
+<span class="sourceLineNo">371</span>      @Override<a name="line.371"></a>
+<span class="sourceLineNo">372</span>      public int getSerializedLength() {<a name="line.372"></a>
+<span class="sourceLineNo">373</span>        return (int)heapSize();<a name="line.373"></a>
+<span class="sourceLineNo">374</span>      }<a name="line.374"></a>
+<span class="sourceLineNo">375</span>    });<a name="line.375"></a>
+<span class="sourceLineNo">376</span>    // The eviction thread in lrublockcache needs to run.<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    while (initialL1BlockCount != lbc.getBlockCount()) Threads.sleep(10);<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    assertEquals(initialL1BlockCount, lbc.getBlockCount());<a name="line.378"></a>
+<span class="sourceLineNo">379</span>  }<a name="line.379"></a>
+<span class="sourceLineNo">380</span><a name="line.380"></a>
+<span class="sourceLineNo">381</span>  @Test<a name="line.381"></a>
+<span class="sourceLineNo">382</span>  public void testL2CacheWithInvalidBucketSize() {<a name="line.382"></a>
+<span class="sourceLineNo">383</span>    Configuration c = new Configuration(this.conf);<a name="line.383"></a>
+<span class="sourceLineNo">384</span>    c.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");<a name="line.384"></a>
+<span class="sourceLineNo">385</span>    c.set(CacheConfig.BUCKET_CACHE_BUCKETS_KEY, "256,512,1024,2048,4000,4096");<a name="line.385"></a>
+<span class="sourceLineNo">386</span>    c.setFloat(HConstants.BUCKET_CACHE_SIZE_KEY, 1024);<a name="line.386"></a>
+<span class="sourceLineNo">387</span>    try {<a name="line.387"></a>
+<span class="sourceLineNo">388</span>      CacheConfig.getBucketCache(c);<a name="line.388"></a>
+<span class="sourceLineNo">389</span>      fail("Should throw IllegalArgumentException when passing illegal value for bucket size");<a name="line.389"></a>
+<span class="sourceLineNo">390</span>    } catch (IllegalArgumentException e) {<a name="line.390"></a>
+<span class="sourceLineNo">391</span>    }<a name="line.391"></a>
+<span class="sourceLineNo">392</span>  }<a name="line.392"></a>
+<span class="sourceLineNo">393</span>}<a name="line.393"></a>
 
 
 


[20/26] hbase-site git commit: Published site at 6f15cecaed2f1f76bfe1880b7c578ed369daa5d5.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html
index bf5af92..f32b223 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html
@@ -223,7 +223,7 @@
 <span class="sourceLineNo">215</span>   * @param family column family configuration<a name="line.215"></a>
 <span class="sourceLineNo">216</span>   */<a name="line.216"></a>
 <span class="sourceLineNo">217</span>  public CacheConfig(Configuration conf, ColumnFamilyDescriptor family) {<a name="line.217"></a>
-<span class="sourceLineNo">218</span>    this(CacheConfig.instantiateBlockCache(conf),<a name="line.218"></a>
+<span class="sourceLineNo">218</span>    this(GLOBAL_BLOCK_CACHE_INSTANCE,<a name="line.218"></a>
 <span class="sourceLineNo">219</span>        conf.getBoolean(CACHE_DATA_ON_READ_KEY, DEFAULT_CACHE_DATA_ON_READ)<a name="line.219"></a>
 <span class="sourceLineNo">220</span>           &amp;&amp; family.isBlockCacheEnabled(),<a name="line.220"></a>
 <span class="sourceLineNo">221</span>        family.isInMemory(),<a name="line.221"></a>
@@ -253,463 +253,443 @@
 <span class="sourceLineNo">245</span>   * @param conf hbase configuration<a name="line.245"></a>
 <span class="sourceLineNo">246</span>   */<a name="line.246"></a>
 <span class="sourceLineNo">247</span>  public CacheConfig(Configuration conf) {<a name="line.247"></a>
-<span class="sourceLineNo">248</span>    this(conf, true);<a name="line.248"></a>
-<span class="sourceLineNo">249</span>  }<a name="line.249"></a>
-<span class="sourceLineNo">250</span><a name="line.250"></a>
-<span class="sourceLineNo">251</span>  public CacheConfig(Configuration conf, boolean enableBlockCache) {<a name="line.251"></a>
-<span class="sourceLineNo">252</span>    this(conf, enableBlockCache,<a name="line.252"></a>
-<span class="sourceLineNo">253</span>        conf.getBoolean(CACHE_DATA_ON_READ_KEY, DEFAULT_CACHE_DATA_ON_READ),<a name="line.253"></a>
-<span class="sourceLineNo">254</span>        DEFAULT_IN_MEMORY, // This is a family-level setting so can't be set<a name="line.254"></a>
-<span class="sourceLineNo">255</span>                           // strictly from conf<a name="line.255"></a>
-<span class="sourceLineNo">256</span>        conf.getBoolean(CACHE_BLOCKS_ON_WRITE_KEY, DEFAULT_CACHE_DATA_ON_WRITE),<a name="line.256"></a>
-<span class="sourceLineNo">257</span>        conf.getBoolean(CACHE_INDEX_BLOCKS_ON_WRITE_KEY, DEFAULT_CACHE_INDEXES_ON_WRITE),<a name="line.257"></a>
-<span class="sourceLineNo">258</span>        conf.getBoolean(CACHE_BLOOM_BLOCKS_ON_WRITE_KEY, DEFAULT_CACHE_BLOOMS_ON_WRITE),<a name="line.258"></a>
-<span class="sourceLineNo">259</span>        conf.getBoolean(EVICT_BLOCKS_ON_CLOSE_KEY, DEFAULT_EVICT_ON_CLOSE),<a name="line.259"></a>
-<span class="sourceLineNo">260</span>        conf.getBoolean(CACHE_DATA_BLOCKS_COMPRESSED_KEY, DEFAULT_CACHE_DATA_COMPRESSED),<a name="line.260"></a>
-<span class="sourceLineNo">261</span>        conf.getBoolean(PREFETCH_BLOCKS_ON_OPEN_KEY, DEFAULT_PREFETCH_ON_OPEN),<a name="line.261"></a>
-<span class="sourceLineNo">262</span>        conf.getBoolean(DROP_BEHIND_CACHE_COMPACTION_KEY, DROP_BEHIND_CACHE_COMPACTION_DEFAULT));<a name="line.262"></a>
-<span class="sourceLineNo">263</span>    LOG.info("Created cacheConfig: " + this);<a name="line.263"></a>
-<span class="sourceLineNo">264</span>  }<a name="line.264"></a>
-<span class="sourceLineNo">265</span><a name="line.265"></a>
-<span class="sourceLineNo">266</span>  private CacheConfig(Configuration conf, boolean enableBlockCache,<a name="line.266"></a>
-<span class="sourceLineNo">267</span>      final boolean cacheDataOnRead, final boolean inMemory,<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      final boolean cacheDataOnWrite, final boolean cacheIndexesOnWrite,<a name="line.268"></a>
-<span class="sourceLineNo">269</span>      final boolean cacheBloomsOnWrite, final boolean evictOnClose,<a name="line.269"></a>
-<span class="sourceLineNo">270</span>      final boolean cacheDataCompressed, final boolean prefetchOnOpen,<a name="line.270"></a>
-<span class="sourceLineNo">271</span>      final boolean dropBehindCompaction) {<a name="line.271"></a>
-<span class="sourceLineNo">272</span>    if (enableBlockCache) {<a name="line.272"></a>
-<span class="sourceLineNo">273</span>      this.blockCache = CacheConfig.instantiateBlockCache(conf);<a name="line.273"></a>
-<span class="sourceLineNo">274</span>    } else {<a name="line.274"></a>
-<span class="sourceLineNo">275</span>      this.blockCache = null;<a name="line.275"></a>
-<span class="sourceLineNo">276</span>    }<a name="line.276"></a>
-<span class="sourceLineNo">277</span>    this.cacheDataOnRead = cacheDataOnRead;<a name="line.277"></a>
-<span class="sourceLineNo">278</span>    this.inMemory = inMemory;<a name="line.278"></a>
-<span class="sourceLineNo">279</span>    this.cacheDataOnWrite = cacheDataOnWrite;<a name="line.279"></a>
-<span class="sourceLineNo">280</span>    this.cacheIndexesOnWrite = cacheIndexesOnWrite;<a name="line.280"></a>
-<span class="sourceLineNo">281</span>    this.cacheBloomsOnWrite = cacheBloomsOnWrite;<a name="line.281"></a>
-<span class="sourceLineNo">282</span>    this.evictOnClose = evictOnClose;<a name="line.282"></a>
-<span class="sourceLineNo">283</span>    this.cacheDataCompressed = cacheDataCompressed;<a name="line.283"></a>
-<span class="sourceLineNo">284</span>    this.prefetchOnOpen = prefetchOnOpen;<a name="line.284"></a>
-<span class="sourceLineNo">285</span>    this.dropBehindCompaction = dropBehindCompaction;<a name="line.285"></a>
-<span class="sourceLineNo">286</span>  }<a name="line.286"></a>
-<span class="sourceLineNo">287</span><a name="line.287"></a>
-<span class="sourceLineNo">288</span>  /**<a name="line.288"></a>
-<span class="sourceLineNo">289</span>   * Create a block cache configuration with the specified cache and configuration parameters.<a name="line.289"></a>
-<span class="sourceLineNo">290</span>   * @param blockCache reference to block cache, null if completely disabled<a name="line.290"></a>
-<span class="sourceLineNo">291</span>   * @param cacheDataOnRead whether DATA blocks should be cached on read (we always cache INDEX<a name="line.291"></a>
-<span class="sourceLineNo">292</span>   *          blocks and BLOOM blocks; this cannot be disabled).<a name="line.292"></a>
-<span class="sourceLineNo">293</span>   * @param inMemory whether blocks should be flagged as in-memory<a name="line.293"></a>
-<span class="sourceLineNo">294</span>   * @param cacheDataOnWrite whether data blocks should be cached on write<a name="line.294"></a>
-<span class="sourceLineNo">295</span>   * @param cacheIndexesOnWrite whether index blocks should be cached on write<a name="line.295"></a>
-<span class="sourceLineNo">296</span>   * @param cacheBloomsOnWrite whether blooms should be cached on write<a name="line.296"></a>
-<span class="sourceLineNo">297</span>   * @param evictOnClose whether blocks should be evicted when HFile is closed<a name="line.297"></a>
-<span class="sourceLineNo">298</span>   * @param cacheDataCompressed whether to store blocks as compressed in the cache<a name="line.298"></a>
-<span class="sourceLineNo">299</span>   * @param prefetchOnOpen whether to prefetch blocks upon open<a name="line.299"></a>
-<span class="sourceLineNo">300</span>   * @param dropBehindCompaction indicate that we should set drop behind to true when open a store<a name="line.300"></a>
-<span class="sourceLineNo">301</span>   *          file reader for compaction<a name="line.301"></a>
-<span class="sourceLineNo">302</span>   */<a name="line.302"></a>
-<span class="sourceLineNo">303</span>  @VisibleForTesting<a name="line.303"></a>
-<span class="sourceLineNo">304</span>  CacheConfig(final BlockCache blockCache,<a name="line.304"></a>
-<span class="sourceLineNo">305</span>      final boolean cacheDataOnRead, final boolean inMemory,<a name="line.305"></a>
-<span class="sourceLineNo">306</span>      final boolean cacheDataOnWrite, final boolean cacheIndexesOnWrite,<a name="line.306"></a>
-<span class="sourceLineNo">307</span>      final boolean cacheBloomsOnWrite, final boolean evictOnClose,<a name="line.307"></a>
-<span class="sourceLineNo">308</span>      final boolean cacheDataCompressed, final boolean prefetchOnOpen,<a name="line.308"></a>
-<span class="sourceLineNo">309</span>      final boolean dropBehindCompaction) {<a name="line.309"></a>
-<span class="sourceLineNo">310</span>    this.blockCache = blockCache;<a name="line.310"></a>
-<span class="sourceLineNo">311</span>    this.cacheDataOnRead = cacheDataOnRead;<a name="line.311"></a>
-<span class="sourceLineNo">312</span>    this.inMemory = inMemory;<a name="line.312"></a>
-<span class="sourceLineNo">313</span>    this.cacheDataOnWrite = cacheDataOnWrite;<a name="line.313"></a>
-<span class="sourceLineNo">314</span>    this.cacheIndexesOnWrite = cacheIndexesOnWrite;<a name="line.314"></a>
-<span class="sourceLineNo">315</span>    this.cacheBloomsOnWrite = cacheBloomsOnWrite;<a name="line.315"></a>
-<span class="sourceLineNo">316</span>    this.evictOnClose = evictOnClose;<a name="line.316"></a>
-<span class="sourceLineNo">317</span>    this.cacheDataCompressed = cacheDataCompressed;<a name="line.317"></a>
-<span class="sourceLineNo">318</span>    this.prefetchOnOpen = prefetchOnOpen;<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    this.dropBehindCompaction = dropBehindCompaction;<a name="line.319"></a>
-<span class="sourceLineNo">320</span>  }<a name="line.320"></a>
-<span class="sourceLineNo">321</span><a name="line.321"></a>
-<span class="sourceLineNo">322</span>  /**<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   * Constructs a cache configuration copied from the specified configuration.<a name="line.323"></a>
-<span class="sourceLineNo">324</span>   * @param cacheConf<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   */<a name="line.325"></a>
-<span class="sourceLineNo">326</span>  public CacheConfig(CacheConfig cacheConf) {<a name="line.326"></a>
-<span class="sourceLineNo">327</span>    this(cacheConf.blockCache, cacheConf.cacheDataOnRead, cacheConf.inMemory,<a name="line.327"></a>
-<span class="sourceLineNo">328</span>        cacheConf.cacheDataOnWrite, cacheConf.cacheIndexesOnWrite,<a name="line.328"></a>
-<span class="sourceLineNo">329</span>        cacheConf.cacheBloomsOnWrite, cacheConf.evictOnClose,<a name="line.329"></a>
-<span class="sourceLineNo">330</span>        cacheConf.cacheDataCompressed, cacheConf.prefetchOnOpen,<a name="line.330"></a>
-<span class="sourceLineNo">331</span>        cacheConf.dropBehindCompaction);<a name="line.331"></a>
-<span class="sourceLineNo">332</span>  }<a name="line.332"></a>
-<span class="sourceLineNo">333</span><a name="line.333"></a>
-<span class="sourceLineNo">334</span>  private CacheConfig() {<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    this(null, false, false, false, false, false, false, false, false, false);<a name="line.335"></a>
-<span class="sourceLineNo">336</span>  }<a name="line.336"></a>
-<span class="sourceLineNo">337</span><a name="line.337"></a>
-<span class="sourceLineNo">338</span>  /**<a name="line.338"></a>
-<span class="sourceLineNo">339</span>   * Checks whether the block cache is enabled.<a name="line.339"></a>
-<span class="sourceLineNo">340</span>   */<a name="line.340"></a>
-<span class="sourceLineNo">341</span>  public boolean isBlockCacheEnabled() {<a name="line.341"></a>
-<span class="sourceLineNo">342</span>    return this.blockCache != null;<a name="line.342"></a>
-<span class="sourceLineNo">343</span>  }<a name="line.343"></a>
-<span class="sourceLineNo">344</span><a name="line.344"></a>
-<span class="sourceLineNo">345</span>  /**<a name="line.345"></a>
-<span class="sourceLineNo">346</span>   * Returns the block cache.<a name="line.346"></a>
-<span class="sourceLineNo">347</span>   * @return the block cache, or null if caching is completely disabled<a name="line.347"></a>
-<span class="sourceLineNo">348</span>   */<a name="line.348"></a>
-<span class="sourceLineNo">349</span>  public BlockCache getBlockCache() {<a name="line.349"></a>
-<span class="sourceLineNo">350</span>    return this.blockCache;<a name="line.350"></a>
-<span class="sourceLineNo">351</span>  }<a name="line.351"></a>
-<span class="sourceLineNo">352</span><a name="line.352"></a>
-<span class="sourceLineNo">353</span>  /**<a name="line.353"></a>
-<span class="sourceLineNo">354</span>   * Returns whether the DATA blocks of this HFile should be cached on read or not (we always<a name="line.354"></a>
-<span class="sourceLineNo">355</span>   * cache the meta blocks, the INDEX and BLOOM blocks).<a name="line.355"></a>
-<span class="sourceLineNo">356</span>   * @return true if blocks should be cached on read, false if not<a name="line.356"></a>
+<span class="sourceLineNo">248</span>    this(GLOBAL_BLOCK_CACHE_INSTANCE,<a name="line.248"></a>
+<span class="sourceLineNo">249</span>        conf.getBoolean(CACHE_DATA_ON_READ_KEY, DEFAULT_CACHE_DATA_ON_READ),<a name="line.249"></a>
+<span class="sourceLineNo">250</span>        DEFAULT_IN_MEMORY, // This is a family-level setting so can't be set<a name="line.250"></a>
+<span class="sourceLineNo">251</span>        // strictly from conf<a name="line.251"></a>
+<span class="sourceLineNo">252</span>        conf.getBoolean(CACHE_BLOCKS_ON_WRITE_KEY, DEFAULT_CACHE_DATA_ON_WRITE),<a name="line.252"></a>
+<span class="sourceLineNo">253</span>        conf.getBoolean(CACHE_INDEX_BLOCKS_ON_WRITE_KEY, DEFAULT_CACHE_INDEXES_ON_WRITE),<a name="line.253"></a>
+<span class="sourceLineNo">254</span>        conf.getBoolean(CACHE_BLOOM_BLOCKS_ON_WRITE_KEY, DEFAULT_CACHE_BLOOMS_ON_WRITE),<a name="line.254"></a>
+<span class="sourceLineNo">255</span>        conf.getBoolean(EVICT_BLOCKS_ON_CLOSE_KEY, DEFAULT_EVICT_ON_CLOSE),<a name="line.255"></a>
+<span class="sourceLineNo">256</span>        conf.getBoolean(CACHE_DATA_BLOCKS_COMPRESSED_KEY, DEFAULT_CACHE_DATA_COMPRESSED),<a name="line.256"></a>
+<span class="sourceLineNo">257</span>        conf.getBoolean(PREFETCH_BLOCKS_ON_OPEN_KEY, DEFAULT_PREFETCH_ON_OPEN),<a name="line.257"></a>
+<span class="sourceLineNo">258</span>        conf.getBoolean(DROP_BEHIND_CACHE_COMPACTION_KEY, DROP_BEHIND_CACHE_COMPACTION_DEFAULT));<a name="line.258"></a>
+<span class="sourceLineNo">259</span>    LOG.info("Created cacheConfig: " + this);<a name="line.259"></a>
+<span class="sourceLineNo">260</span>  }<a name="line.260"></a>
+<span class="sourceLineNo">261</span><a name="line.261"></a>
+<span class="sourceLineNo">262</span>  /**<a name="line.262"></a>
+<span class="sourceLineNo">263</span>   * Create a block cache configuration with the specified cache and configuration parameters.<a name="line.263"></a>
+<span class="sourceLineNo">264</span>   * @param blockCache reference to block cache, null if completely disabled<a name="line.264"></a>
+<span class="sourceLineNo">265</span>   * @param cacheDataOnRead whether DATA blocks should be cached on read (we always cache INDEX<a name="line.265"></a>
+<span class="sourceLineNo">266</span>   *          blocks and BLOOM blocks; this cannot be disabled).<a name="line.266"></a>
+<span class="sourceLineNo">267</span>   * @param inMemory whether blocks should be flagged as in-memory<a name="line.267"></a>
+<span class="sourceLineNo">268</span>   * @param cacheDataOnWrite whether data blocks should be cached on write<a name="line.268"></a>
+<span class="sourceLineNo">269</span>   * @param cacheIndexesOnWrite whether index blocks should be cached on write<a name="line.269"></a>
+<span class="sourceLineNo">270</span>   * @param cacheBloomsOnWrite whether blooms should be cached on write<a name="line.270"></a>
+<span class="sourceLineNo">271</span>   * @param evictOnClose whether blocks should be evicted when HFile is closed<a name="line.271"></a>
+<span class="sourceLineNo">272</span>   * @param cacheDataCompressed whether to store blocks as compressed in the cache<a name="line.272"></a>
+<span class="sourceLineNo">273</span>   * @param prefetchOnOpen whether to prefetch blocks upon open<a name="line.273"></a>
+<span class="sourceLineNo">274</span>   * @param dropBehindCompaction indicate that we should set drop behind to true when open a store<a name="line.274"></a>
+<span class="sourceLineNo">275</span>   *          file reader for compaction<a name="line.275"></a>
+<span class="sourceLineNo">276</span>   */<a name="line.276"></a>
+<span class="sourceLineNo">277</span>  @VisibleForTesting<a name="line.277"></a>
+<span class="sourceLineNo">278</span>  CacheConfig(final BlockCache blockCache,<a name="line.278"></a>
+<span class="sourceLineNo">279</span>      final boolean cacheDataOnRead, final boolean inMemory,<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      final boolean cacheDataOnWrite, final boolean cacheIndexesOnWrite,<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      final boolean cacheBloomsOnWrite, final boolean evictOnClose,<a name="line.281"></a>
+<span class="sourceLineNo">282</span>      final boolean cacheDataCompressed, final boolean prefetchOnOpen,<a name="line.282"></a>
+<span class="sourceLineNo">283</span>      final boolean dropBehindCompaction) {<a name="line.283"></a>
+<span class="sourceLineNo">284</span>    this.blockCache = blockCache;<a name="line.284"></a>
+<span class="sourceLineNo">285</span>    this.cacheDataOnRead = cacheDataOnRead;<a name="line.285"></a>
+<span class="sourceLineNo">286</span>    this.inMemory = inMemory;<a name="line.286"></a>
+<span class="sourceLineNo">287</span>    this.cacheDataOnWrite = cacheDataOnWrite;<a name="line.287"></a>
+<span class="sourceLineNo">288</span>    this.cacheIndexesOnWrite = cacheIndexesOnWrite;<a name="line.288"></a>
+<span class="sourceLineNo">289</span>    this.cacheBloomsOnWrite = cacheBloomsOnWrite;<a name="line.289"></a>
+<span class="sourceLineNo">290</span>    this.evictOnClose = evictOnClose;<a name="line.290"></a>
+<span class="sourceLineNo">291</span>    this.cacheDataCompressed = cacheDataCompressed;<a name="line.291"></a>
+<span class="sourceLineNo">292</span>    this.prefetchOnOpen = prefetchOnOpen;<a name="line.292"></a>
+<span class="sourceLineNo">293</span>    this.dropBehindCompaction = dropBehindCompaction;<a name="line.293"></a>
+<span class="sourceLineNo">294</span>  }<a name="line.294"></a>
+<span class="sourceLineNo">295</span><a name="line.295"></a>
+<span class="sourceLineNo">296</span>  /**<a name="line.296"></a>
+<span class="sourceLineNo">297</span>   * Constructs a cache configuration copied from the specified configuration.<a name="line.297"></a>
+<span class="sourceLineNo">298</span>   * @param cacheConf<a name="line.298"></a>
+<span class="sourceLineNo">299</span>   */<a name="line.299"></a>
+<span class="sourceLineNo">300</span>  public CacheConfig(CacheConfig cacheConf) {<a name="line.300"></a>
+<span class="sourceLineNo">301</span>    this(cacheConf.blockCache, cacheConf.cacheDataOnRead, cacheConf.inMemory,<a name="line.301"></a>
+<span class="sourceLineNo">302</span>        cacheConf.cacheDataOnWrite, cacheConf.cacheIndexesOnWrite,<a name="line.302"></a>
+<span class="sourceLineNo">303</span>        cacheConf.cacheBloomsOnWrite, cacheConf.evictOnClose,<a name="line.303"></a>
+<span class="sourceLineNo">304</span>        cacheConf.cacheDataCompressed, cacheConf.prefetchOnOpen,<a name="line.304"></a>
+<span class="sourceLineNo">305</span>        cacheConf.dropBehindCompaction);<a name="line.305"></a>
+<span class="sourceLineNo">306</span>  }<a name="line.306"></a>
+<span class="sourceLineNo">307</span><a name="line.307"></a>
+<span class="sourceLineNo">308</span>  private CacheConfig() {<a name="line.308"></a>
+<span class="sourceLineNo">309</span>    this(null, false, false, false, false, false, false, false, false, false);<a name="line.309"></a>
+<span class="sourceLineNo">310</span>  }<a name="line.310"></a>
+<span class="sourceLineNo">311</span><a name="line.311"></a>
+<span class="sourceLineNo">312</span>  /**<a name="line.312"></a>
+<span class="sourceLineNo">313</span>   * Checks whether the block cache is enabled.<a name="line.313"></a>
+<span class="sourceLineNo">314</span>   */<a name="line.314"></a>
+<span class="sourceLineNo">315</span>  public boolean isBlockCacheEnabled() {<a name="line.315"></a>
+<span class="sourceLineNo">316</span>    return this.blockCache != null;<a name="line.316"></a>
+<span class="sourceLineNo">317</span>  }<a name="line.317"></a>
+<span class="sourceLineNo">318</span><a name="line.318"></a>
+<span class="sourceLineNo">319</span>  /**<a name="line.319"></a>
+<span class="sourceLineNo">320</span>   * Returns the block cache.<a name="line.320"></a>
+<span class="sourceLineNo">321</span>   * @return the block cache, or null if caching is completely disabled<a name="line.321"></a>
+<span class="sourceLineNo">322</span>   */<a name="line.322"></a>
+<span class="sourceLineNo">323</span>  public BlockCache getBlockCache() {<a name="line.323"></a>
+<span class="sourceLineNo">324</span>    return this.blockCache;<a name="line.324"></a>
+<span class="sourceLineNo">325</span>  }<a name="line.325"></a>
+<span class="sourceLineNo">326</span><a name="line.326"></a>
+<span class="sourceLineNo">327</span>  /**<a name="line.327"></a>
+<span class="sourceLineNo">328</span>   * Returns whether the DATA blocks of this HFile should be cached on read or not (we always<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * cache the meta blocks, the INDEX and BLOOM blocks).<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   * @return true if blocks should be cached on read, false if not<a name="line.330"></a>
+<span class="sourceLineNo">331</span>   */<a name="line.331"></a>
+<span class="sourceLineNo">332</span>  public boolean shouldCacheDataOnRead() {<a name="line.332"></a>
+<span class="sourceLineNo">333</span>    return isBlockCacheEnabled() &amp;&amp; cacheDataOnRead;<a name="line.333"></a>
+<span class="sourceLineNo">334</span>  }<a name="line.334"></a>
+<span class="sourceLineNo">335</span><a name="line.335"></a>
+<span class="sourceLineNo">336</span>  public boolean shouldDropBehindCompaction() {<a name="line.336"></a>
+<span class="sourceLineNo">337</span>    return dropBehindCompaction;<a name="line.337"></a>
+<span class="sourceLineNo">338</span>  }<a name="line.338"></a>
+<span class="sourceLineNo">339</span><a name="line.339"></a>
+<span class="sourceLineNo">340</span>  /**<a name="line.340"></a>
+<span class="sourceLineNo">341</span>   * Should we cache a block of a particular category? We always cache<a name="line.341"></a>
+<span class="sourceLineNo">342</span>   * important blocks such as index blocks, as long as the block cache is<a name="line.342"></a>
+<span class="sourceLineNo">343</span>   * available.<a name="line.343"></a>
+<span class="sourceLineNo">344</span>   */<a name="line.344"></a>
+<span class="sourceLineNo">345</span>  public boolean shouldCacheBlockOnRead(BlockCategory category) {<a name="line.345"></a>
+<span class="sourceLineNo">346</span>    return isBlockCacheEnabled()<a name="line.346"></a>
+<span class="sourceLineNo">347</span>        &amp;&amp; (cacheDataOnRead ||<a name="line.347"></a>
+<span class="sourceLineNo">348</span>            category == BlockCategory.INDEX ||<a name="line.348"></a>
+<span class="sourceLineNo">349</span>            category == BlockCategory.BLOOM ||<a name="line.349"></a>
+<span class="sourceLineNo">350</span>            (prefetchOnOpen &amp;&amp;<a name="line.350"></a>
+<span class="sourceLineNo">351</span>                (category != BlockCategory.META &amp;&amp;<a name="line.351"></a>
+<span class="sourceLineNo">352</span>                 category != BlockCategory.UNKNOWN)));<a name="line.352"></a>
+<span class="sourceLineNo">353</span>  }<a name="line.353"></a>
+<span class="sourceLineNo">354</span><a name="line.354"></a>
+<span class="sourceLineNo">355</span>  /**<a name="line.355"></a>
+<span class="sourceLineNo">356</span>   * @return true if blocks in this file should be flagged as in-memory<a name="line.356"></a>
 <span class="sourceLineNo">357</span>   */<a name="line.357"></a>
-<span class="sourceLineNo">358</span>  public boolean shouldCacheDataOnRead() {<a name="line.358"></a>
-<span class="sourceLineNo">359</span>    return isBlockCacheEnabled() &amp;&amp; cacheDataOnRead;<a name="line.359"></a>
+<span class="sourceLineNo">358</span>  public boolean isInMemory() {<a name="line.358"></a>
+<span class="sourceLineNo">359</span>    return isBlockCacheEnabled() &amp;&amp; this.inMemory;<a name="line.359"></a>
 <span class="sourceLineNo">360</span>  }<a name="line.360"></a>
 <span class="sourceLineNo">361</span><a name="line.361"></a>
-<span class="sourceLineNo">362</span>  public boolean shouldDropBehindCompaction() {<a name="line.362"></a>
-<span class="sourceLineNo">363</span>    return dropBehindCompaction;<a name="line.363"></a>
-<span class="sourceLineNo">364</span>  }<a name="line.364"></a>
-<span class="sourceLineNo">365</span><a name="line.365"></a>
-<span class="sourceLineNo">366</span>  /**<a name="line.366"></a>
-<span class="sourceLineNo">367</span>   * Should we cache a block of a particular category? We always cache<a name="line.367"></a>
-<span class="sourceLineNo">368</span>   * important blocks such as index blocks, as long as the block cache is<a name="line.368"></a>
-<span class="sourceLineNo">369</span>   * available.<a name="line.369"></a>
-<span class="sourceLineNo">370</span>   */<a name="line.370"></a>
-<span class="sourceLineNo">371</span>  public boolean shouldCacheBlockOnRead(BlockCategory category) {<a name="line.371"></a>
-<span class="sourceLineNo">372</span>    return isBlockCacheEnabled()<a name="line.372"></a>
-<span class="sourceLineNo">373</span>        &amp;&amp; (cacheDataOnRead ||<a name="line.373"></a>
-<span class="sourceLineNo">374</span>            category == BlockCategory.INDEX ||<a name="line.374"></a>
-<span class="sourceLineNo">375</span>            category == BlockCategory.BLOOM ||<a name="line.375"></a>
-<span class="sourceLineNo">376</span>            (prefetchOnOpen &amp;&amp;<a name="line.376"></a>
-<span class="sourceLineNo">377</span>                (category != BlockCategory.META &amp;&amp;<a name="line.377"></a>
-<span class="sourceLineNo">378</span>                 category != BlockCategory.UNKNOWN)));<a name="line.378"></a>
-<span class="sourceLineNo">379</span>  }<a name="line.379"></a>
-<span class="sourceLineNo">380</span><a name="line.380"></a>
-<span class="sourceLineNo">381</span>  /**<a name="line.381"></a>
-<span class="sourceLineNo">382</span>   * @return true if blocks in this file should be flagged as in-memory<a name="line.382"></a>
+<span class="sourceLineNo">362</span>  /**<a name="line.362"></a>
+<span class="sourceLineNo">363</span>   * @return true if data blocks should be written to the cache when an HFile is<a name="line.363"></a>
+<span class="sourceLineNo">364</span>   *         written, false if not<a name="line.364"></a>
+<span class="sourceLineNo">365</span>   */<a name="line.365"></a>
+<span class="sourceLineNo">366</span>  public boolean shouldCacheDataOnWrite() {<a name="line.366"></a>
+<span class="sourceLineNo">367</span>    return isBlockCacheEnabled() &amp;&amp; this.cacheDataOnWrite;<a name="line.367"></a>
+<span class="sourceLineNo">368</span>  }<a name="line.368"></a>
+<span class="sourceLineNo">369</span><a name="line.369"></a>
+<span class="sourceLineNo">370</span>  /**<a name="line.370"></a>
+<span class="sourceLineNo">371</span>   * Only used for testing.<a name="line.371"></a>
+<span class="sourceLineNo">372</span>   * @param cacheDataOnWrite whether data blocks should be written to the cache<a name="line.372"></a>
+<span class="sourceLineNo">373</span>   *                         when an HFile is written<a name="line.373"></a>
+<span class="sourceLineNo">374</span>   */<a name="line.374"></a>
+<span class="sourceLineNo">375</span>  @VisibleForTesting<a name="line.375"></a>
+<span class="sourceLineNo">376</span>  public void setCacheDataOnWrite(boolean cacheDataOnWrite) {<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    this.cacheDataOnWrite = cacheDataOnWrite;<a name="line.377"></a>
+<span class="sourceLineNo">378</span>  }<a name="line.378"></a>
+<span class="sourceLineNo">379</span><a name="line.379"></a>
+<span class="sourceLineNo">380</span>  /**<a name="line.380"></a>
+<span class="sourceLineNo">381</span>   * @return true if index blocks should be written to the cache when an HFile<a name="line.381"></a>
+<span class="sourceLineNo">382</span>   *         is written, false if not<a name="line.382"></a>
 <span class="sourceLineNo">383</span>   */<a name="line.383"></a>
-<span class="sourceLineNo">384</span>  public boolean isInMemory() {<a name="line.384"></a>
-<span class="sourceLineNo">385</span>    return isBlockCacheEnabled() &amp;&amp; this.inMemory;<a name="line.385"></a>
+<span class="sourceLineNo">384</span>  public boolean shouldCacheIndexesOnWrite() {<a name="line.384"></a>
+<span class="sourceLineNo">385</span>    return isBlockCacheEnabled() &amp;&amp; this.cacheIndexesOnWrite;<a name="line.385"></a>
 <span class="sourceLineNo">386</span>  }<a name="line.386"></a>
 <span class="sourceLineNo">387</span><a name="line.387"></a>
 <span class="sourceLineNo">388</span>  /**<a name="line.388"></a>
-<span class="sourceLineNo">389</span>   * @return true if data blocks should be written to the cache when an HFile is<a name="line.389"></a>
-<span class="sourceLineNo">390</span>   *         written, false if not<a name="line.390"></a>
+<span class="sourceLineNo">389</span>   * @return true if bloom blocks should be written to the cache when an HFile<a name="line.389"></a>
+<span class="sourceLineNo">390</span>   *         is written, false if not<a name="line.390"></a>
 <span class="sourceLineNo">391</span>   */<a name="line.391"></a>
-<span class="sourceLineNo">392</span>  public boolean shouldCacheDataOnWrite() {<a name="line.392"></a>
-<span class="sourceLineNo">393</span>    return isBlockCacheEnabled() &amp;&amp; this.cacheDataOnWrite;<a name="line.393"></a>
+<span class="sourceLineNo">392</span>  public boolean shouldCacheBloomsOnWrite() {<a name="line.392"></a>
+<span class="sourceLineNo">393</span>    return isBlockCacheEnabled() &amp;&amp; this.cacheBloomsOnWrite;<a name="line.393"></a>
 <span class="sourceLineNo">394</span>  }<a name="line.394"></a>
 <span class="sourceLineNo">395</span><a name="line.395"></a>
 <span class="sourceLineNo">396</span>  /**<a name="line.396"></a>
-<span class="sourceLineNo">397</span>   * Only used for testing.<a name="line.397"></a>
-<span class="sourceLineNo">398</span>   * @param cacheDataOnWrite whether data blocks should be written to the cache<a name="line.398"></a>
-<span class="sourceLineNo">399</span>   *                         when an HFile is written<a name="line.399"></a>
-<span class="sourceLineNo">400</span>   */<a name="line.400"></a>
-<span class="sourceLineNo">401</span>  @VisibleForTesting<a name="line.401"></a>
-<span class="sourceLineNo">402</span>  public void setCacheDataOnWrite(boolean cacheDataOnWrite) {<a name="line.402"></a>
-<span class="sourceLineNo">403</span>    this.cacheDataOnWrite = cacheDataOnWrite;<a name="line.403"></a>
-<span class="sourceLineNo">404</span>  }<a name="line.404"></a>
-<span class="sourceLineNo">405</span><a name="line.405"></a>
-<span class="sourceLineNo">406</span>  /**<a name="line.406"></a>
-<span class="sourceLineNo">407</span>   * @return true if index blocks should be written to the cache when an HFile<a name="line.407"></a>
-<span class="sourceLineNo">408</span>   *         is written, false if not<a name="line.408"></a>
-<span class="sourceLineNo">409</span>   */<a name="line.409"></a>
-<span class="sourceLineNo">410</span>  public boolean shouldCacheIndexesOnWrite() {<a name="line.410"></a>
-<span class="sourceLineNo">411</span>    return isBlockCacheEnabled() &amp;&amp; this.cacheIndexesOnWrite;<a name="line.411"></a>
-<span class="sourceLineNo">412</span>  }<a name="line.412"></a>
-<span class="sourceLineNo">413</span><a name="line.413"></a>
-<span class="sourceLineNo">414</span>  /**<a name="line.414"></a>
-<span class="sourceLineNo">415</span>   * @return true if bloom blocks should be written to the cache when an HFile<a name="line.415"></a>
-<span class="sourceLineNo">416</span>   *         is written, false if not<a name="line.416"></a>
-<span class="sourceLineNo">417</span>   */<a name="line.417"></a>
-<span class="sourceLineNo">418</span>  public boolean shouldCacheBloomsOnWrite() {<a name="line.418"></a>
-<span class="sourceLineNo">419</span>    return isBlockCacheEnabled() &amp;&amp; this.cacheBloomsOnWrite;<a name="line.419"></a>
-<span class="sourceLineNo">420</span>  }<a name="line.420"></a>
-<span class="sourceLineNo">421</span><a name="line.421"></a>
-<span class="sourceLineNo">422</span>  /**<a name="line.422"></a>
-<span class="sourceLineNo">423</span>   * @return true if blocks should be evicted from the cache when an HFile<a name="line.423"></a>
-<span class="sourceLineNo">424</span>   *         reader is closed, false if not<a name="line.424"></a>
-<span class="sourceLineNo">425</span>   */<a name="line.425"></a>
-<span class="sourceLineNo">426</span>  public boolean shouldEvictOnClose() {<a name="line.426"></a>
-<span class="sourceLineNo">427</span>    return isBlockCacheEnabled() &amp;&amp; this.evictOnClose;<a name="line.427"></a>
-<span class="sourceLineNo">428</span>  }<a name="line.428"></a>
-<span class="sourceLineNo">429</span><a name="line.429"></a>
-<span class="sourceLineNo">430</span>  /**<a name="line.430"></a>
-<span class="sourceLineNo">431</span>   * Only used for testing.<a name="line.431"></a>
-<span class="sourceLineNo">432</span>   * @param evictOnClose whether blocks should be evicted from the cache when an<a name="line.432"></a>
-<span class="sourceLineNo">433</span>   *                     HFile reader is closed<a name="line.433"></a>
-<span class="sourceLineNo">434</span>   */<a name="line.434"></a>
-<span class="sourceLineNo">435</span>  public void setEvictOnClose(boolean evictOnClose) {<a name="line.435"></a>
-<span class="sourceLineNo">436</span>    this.evictOnClose = evictOnClose;<a name="line.436"></a>
-<span class="sourceLineNo">437</span>  }<a name="line.437"></a>
-<span class="sourceLineNo">438</span><a name="line.438"></a>
-<span class="sourceLineNo">439</span>  /**<a name="line.439"></a>
-<span class="sourceLineNo">440</span>   * @return true if data blocks should be compressed in the cache, false if not<a name="line.440"></a>
-<span class="sourceLineNo">441</span>   */<a name="line.441"></a>
-<span class="sourceLineNo">442</span>  public boolean shouldCacheDataCompressed() {<a name="line.442"></a>
-<span class="sourceLineNo">443</span>    return isBlockCacheEnabled() &amp;&amp; this.cacheDataOnRead &amp;&amp; this.cacheDataCompressed;<a name="line.443"></a>
-<span class="sourceLineNo">444</span>  }<a name="line.444"></a>
-<span class="sourceLineNo">445</span><a name="line.445"></a>
-<span class="sourceLineNo">446</span>  /**<a name="line.446"></a>
-<span class="sourceLineNo">447</span>   * @return true if this {@link BlockCategory} should be compressed in blockcache, false otherwise<a name="line.447"></a>
-<span class="sourceLineNo">448</span>   */<a name="line.448"></a>
-<span class="sourceLineNo">449</span>  public boolean shouldCacheCompressed(BlockCategory category) {<a name="line.449"></a>
-<span class="sourceLineNo">450</span>    if (!isBlockCacheEnabled()) return false;<a name="line.450"></a>
-<span class="sourceLineNo">451</span>    switch (category) {<a name="line.451"></a>
-<span class="sourceLineNo">452</span>      case DATA:<a name="line.452"></a>
-<span class="sourceLineNo">453</span>        return this.cacheDataOnRead &amp;&amp; this.cacheDataCompressed;<a name="line.453"></a>
-<span class="sourceLineNo">454</span>      default:<a name="line.454"></a>
-<span class="sourceLineNo">455</span>        return false;<a name="line.455"></a>
-<span class="sourceLineNo">456</span>    }<a name="line.456"></a>
-<span class="sourceLineNo">457</span>  }<a name="line.457"></a>
-<span class="sourceLineNo">458</span><a name="line.458"></a>
-<span class="sourceLineNo">459</span>  /**<a name="line.459"></a>
-<span class="sourceLineNo">460</span>   * @return true if blocks should be prefetched into the cache on open, false if not<a name="line.460"></a>
-<span class="sourceLineNo">461</span>   */<a name="line.461"></a>
-<span class="sourceLineNo">462</span>  public boolean shouldPrefetchOnOpen() {<a name="line.462"></a>
-<span class="sourceLineNo">463</span>    return isBlockCacheEnabled() &amp;&amp; this.prefetchOnOpen;<a name="line.463"></a>
-<span class="sourceLineNo">464</span>  }<a name="line.464"></a>
-<span class="sourceLineNo">465</span><a name="line.465"></a>
-<span class="sourceLineNo">466</span>  /**<a name="line.466"></a>
-<span class="sourceLineNo">467</span>   * Return true if we may find this type of block in block cache.<a name="line.467"></a>
-<span class="sourceLineNo">468</span>   * &lt;p&gt;<a name="line.468"></a>
-<span class="sourceLineNo">469</span>   * TODO: today {@code family.isBlockCacheEnabled()} only means {@code cacheDataOnRead}, so here we<a name="line.469"></a>
-<span class="sourceLineNo">470</span>   * consider lots of other configurations such as {@code cacheDataOnWrite}. We should fix this in<a name="line.470"></a>
-<span class="sourceLineNo">471</span>   * the future, {@code cacheDataOnWrite} should honor the CF level {@code isBlockCacheEnabled}<a name="line.471"></a>
-<span class="sourceLineNo">472</span>   * configuration.<a name="line.472"></a>
-<span class="sourceLineNo">473</span>   */<a name="line.473"></a>
-<span class="sourceLineNo">474</span>  public boolean shouldReadBlockFromCache(BlockType blockType) {<a name="line.474"></a>
-<span class="sourceLineNo">475</span>    if (!isBlockCacheEnabled()) {<a name="line.475"></a>
-<span class="sourceLineNo">476</span>      return false;<a name="line.476"></a>
-<span class="sourceLineNo">477</span>    }<a name="line.477"></a>
-<span class="sourceLineNo">478</span>    if (cacheDataOnRead) {<a name="line.478"></a>
-<span class="sourceLineNo">479</span>      return true;<a name="line.479"></a>
-<span class="sourceLineNo">480</span>    }<a name="line.480"></a>
-<span class="sourceLineNo">481</span>    if (prefetchOnOpen) {<a name="line.481"></a>
-<span class="sourceLineNo">482</span>      return true;<a name="line.482"></a>
-<span class="sourceLineNo">483</span>    }<a name="line.483"></a>
-<span class="sourceLineNo">484</span>    if (cacheDataOnWrite) {<a name="line.484"></a>
-<span class="sourceLineNo">485</span>      return true;<a name="line.485"></a>
+<span class="sourceLineNo">397</span>   * @return true if blocks should be evicted from the cache when an HFile<a name="line.397"></a>
+<span class="sourceLineNo">398</span>   *         reader is closed, false if not<a name="line.398"></a>
+<span class="sourceLineNo">399</span>   */<a name="line.399"></a>
+<span class="sourceLineNo">400</span>  public boolean shouldEvictOnClose() {<a name="line.400"></a>
+<span class="sourceLineNo">401</span>    return isBlockCacheEnabled() &amp;&amp; this.evictOnClose;<a name="line.401"></a>
+<span class="sourceLineNo">402</span>  }<a name="line.402"></a>
+<span class="sourceLineNo">403</span><a name="line.403"></a>
+<span class="sourceLineNo">404</span>  /**<a name="line.404"></a>
+<span class="sourceLineNo">405</span>   * Only used for testing.<a name="line.405"></a>
+<span class="sourceLineNo">406</span>   * @param evictOnClose whether blocks should be evicted from the cache when an<a name="line.406"></a>
+<span class="sourceLineNo">407</span>   *                     HFile reader is closed<a name="line.407"></a>
+<span class="sourceLineNo">408</span>   */<a name="line.408"></a>
+<span class="sourceLineNo">409</span>  public void setEvictOnClose(boolean evictOnClose) {<a name="line.409"></a>
+<span class="sourceLineNo">410</span>    this.evictOnClose = evictOnClose;<a name="line.410"></a>
+<span class="sourceLineNo">411</span>  }<a name="line.411"></a>
+<span class="sourceLineNo">412</span><a name="line.412"></a>
+<span class="sourceLineNo">413</span>  /**<a name="line.413"></a>
+<span class="sourceLineNo">414</span>   * @return true if data blocks should be compressed in the cache, false if not<a name="line.414"></a>
+<span class="sourceLineNo">415</span>   */<a name="line.415"></a>
+<span class="sourceLineNo">416</span>  public boolean shouldCacheDataCompressed() {<a name="line.416"></a>
+<span class="sourceLineNo">417</span>    return isBlockCacheEnabled() &amp;&amp; this.cacheDataOnRead &amp;&amp; this.cacheDataCompressed;<a name="line.417"></a>
+<span class="sourceLineNo">418</span>  }<a name="line.418"></a>
+<span class="sourceLineNo">419</span><a name="line.419"></a>
+<span class="sourceLineNo">420</span>  /**<a name="line.420"></a>
+<span class="sourceLineNo">421</span>   * @return true if this {@link BlockCategory} should be compressed in blockcache, false otherwise<a name="line.421"></a>
+<span class="sourceLineNo">422</span>   */<a name="line.422"></a>
+<span class="sourceLineNo">423</span>  public boolean shouldCacheCompressed(BlockCategory category) {<a name="line.423"></a>
+<span class="sourceLineNo">424</span>    if (!isBlockCacheEnabled()) return false;<a name="line.424"></a>
+<span class="sourceLineNo">425</span>    switch (category) {<a name="line.425"></a>
+<span class="sourceLineNo">426</span>      case DATA:<a name="line.426"></a>
+<span class="sourceLineNo">427</span>        return this.cacheDataOnRead &amp;&amp; this.cacheDataCompressed;<a name="line.427"></a>
+<span class="sourceLineNo">428</span>      default:<a name="line.428"></a>
+<span class="sourceLineNo">429</span>        return false;<a name="line.429"></a>
+<span class="sourceLineNo">430</span>    }<a name="line.430"></a>
+<span class="sourceLineNo">431</span>  }<a name="line.431"></a>
+<span class="sourceLineNo">432</span><a name="line.432"></a>
+<span class="sourceLineNo">433</span>  /**<a name="line.433"></a>
+<span class="sourceLineNo">434</span>   * @return true if blocks should be prefetched into the cache on open, false if not<a name="line.434"></a>
+<span class="sourceLineNo">435</span>   */<a name="line.435"></a>
+<span class="sourceLineNo">436</span>  public boolean shouldPrefetchOnOpen() {<a name="line.436"></a>
+<span class="sourceLineNo">437</span>    return isBlockCacheEnabled() &amp;&amp; this.prefetchOnOpen;<a name="line.437"></a>
+<span class="sourceLineNo">438</span>  }<a name="line.438"></a>
+<span class="sourceLineNo">439</span><a name="line.439"></a>
+<span class="sourceLineNo">440</span>  /**<a name="line.440"></a>
+<span class="sourceLineNo">441</span>   * Return true if we may find this type of block in block cache.<a name="line.441"></a>
+<span class="sourceLineNo">442</span>   * &lt;p&gt;<a name="line.442"></a>
+<span class="sourceLineNo">443</span>   * TODO: today {@code family.isBlockCacheEnabled()} only means {@code cacheDataOnRead}, so here we<a name="line.443"></a>
+<span class="sourceLineNo">444</span>   * consider lots of other configurations such as {@code cacheDataOnWrite}. We should fix this in<a name="line.444"></a>
+<span class="sourceLineNo">445</span>   * the future, {@code cacheDataOnWrite} should honor the CF level {@code isBlockCacheEnabled}<a name="line.445"></a>
+<span class="sourceLineNo">446</span>   * configuration.<a name="line.446"></a>
+<span class="sourceLineNo">447</span>   */<a name="line.447"></a>
+<span class="sourceLineNo">448</span>  public boolean shouldReadBlockFromCache(BlockType blockType) {<a name="line.448"></a>
+<span class="sourceLineNo">449</span>    if (!isBlockCacheEnabled()) {<a name="line.449"></a>
+<span class="sourceLineNo">450</span>      return false;<a name="line.450"></a>
+<span class="sourceLineNo">451</span>    }<a name="line.451"></a>
+<span class="sourceLineNo">452</span>    if (cacheDataOnRead) {<a name="line.452"></a>
+<span class="sourceLineNo">453</span>      return true;<a name="line.453"></a>
+<span class="sourceLineNo">454</span>    }<a name="line.454"></a>
+<span class="sourceLineNo">455</span>    if (prefetchOnOpen) {<a name="line.455"></a>
+<span class="sourceLineNo">456</span>      return true;<a name="line.456"></a>
+<span class="sourceLineNo">457</span>    }<a name="line.457"></a>
+<span class="sourceLineNo">458</span>    if (cacheDataOnWrite) {<a name="line.458"></a>
+<span class="sourceLineNo">459</span>      return true;<a name="line.459"></a>
+<span class="sourceLineNo">460</span>    }<a name="line.460"></a>
+<span class="sourceLineNo">461</span>    if (blockType == null) {<a name="line.461"></a>
+<span class="sourceLineNo">462</span>      return true;<a name="line.462"></a>
+<span class="sourceLineNo">463</span>    }<a name="line.463"></a>
+<span class="sourceLineNo">464</span>    if (blockType.getCategory() == BlockCategory.BLOOM ||<a name="line.464"></a>
+<span class="sourceLineNo">465</span>            blockType.getCategory() == BlockCategory.INDEX) {<a name="line.465"></a>
+<span class="sourceLineNo">466</span>      return true;<a name="line.466"></a>
+<span class="sourceLineNo">467</span>    }<a name="line.467"></a>
+<span class="sourceLineNo">468</span>    return false;<a name="line.468"></a>
+<span class="sourceLineNo">469</span>  }<a name="line.469"></a>
+<span class="sourceLineNo">470</span><a name="line.470"></a>
+<span class="sourceLineNo">471</span>  /**<a name="line.471"></a>
+<span class="sourceLineNo">472</span>   * If we make sure the block could not be cached, we will not acquire the lock<a name="line.472"></a>
+<span class="sourceLineNo">473</span>   * otherwise we will acquire lock<a name="line.473"></a>
+<span class="sourceLineNo">474</span>   */<a name="line.474"></a>
+<span class="sourceLineNo">475</span>  public boolean shouldLockOnCacheMiss(BlockType blockType) {<a name="line.475"></a>
+<span class="sourceLineNo">476</span>    if (blockType == null) {<a name="line.476"></a>
+<span class="sourceLineNo">477</span>      return true;<a name="line.477"></a>
+<span class="sourceLineNo">478</span>    }<a name="line.478"></a>
+<span class="sourceLineNo">479</span>    return shouldCacheBlockOnRead(blockType.getCategory());<a name="line.479"></a>
+<span class="sourceLineNo">480</span>  }<a name="line.480"></a>
+<span class="sourceLineNo">481</span><a name="line.481"></a>
+<span class="sourceLineNo">482</span>  @Override<a name="line.482"></a>
+<span class="sourceLineNo">483</span>  public String toString() {<a name="line.483"></a>
+<span class="sourceLineNo">484</span>    if (!isBlockCacheEnabled()) {<a name="line.484"></a>
+<span class="sourceLineNo">485</span>      return "CacheConfig:disabled";<a name="line.485"></a>
 <span class="sourceLineNo">486</span>    }<a name="line.486"></a>
-<span class="sourceLineNo">487</span>    if (blockType == null) {<a name="line.487"></a>
-<span class="sourceLineNo">488</span>      return true;<a name="line.488"></a>
-<span class="sourceLineNo">489</span>    }<a name="line.489"></a>
-<span class="sourceLineNo">490</span>    if (blockType.getCategory() == BlockCategory.BLOOM ||<a name="line.490"></a>
-<span class="sourceLineNo">491</span>            blockType.getCategory() == BlockCategory.INDEX) {<a name="line.491"></a>
-<span class="sourceLineNo">492</span>      return true;<a name="line.492"></a>
-<span class="sourceLineNo">493</span>    }<a name="line.493"></a>
-<span class="sourceLineNo">494</span>    return false;<a name="line.494"></a>
+<span class="sourceLineNo">487</span>    return "blockCache=" + getBlockCache() +<a name="line.487"></a>
+<span class="sourceLineNo">488</span>      ", cacheDataOnRead=" + shouldCacheDataOnRead() +<a name="line.488"></a>
+<span class="sourceLineNo">489</span>      ", cacheDataOnWrite=" + shouldCacheDataOnWrite() +<a name="line.489"></a>
+<span class="sourceLineNo">490</span>      ", cacheIndexesOnWrite=" + shouldCacheIndexesOnWrite() +<a name="line.490"></a>
+<span class="sourceLineNo">491</span>      ", cacheBloomsOnWrite=" + shouldCacheBloomsOnWrite() +<a name="line.491"></a>
+<span class="sourceLineNo">492</span>      ", cacheEvictOnClose=" + shouldEvictOnClose() +<a name="line.492"></a>
+<span class="sourceLineNo">493</span>      ", cacheDataCompressed=" + shouldCacheDataCompressed() +<a name="line.493"></a>
+<span class="sourceLineNo">494</span>      ", prefetchOnOpen=" + shouldPrefetchOnOpen();<a name="line.494"></a>
 <span class="sourceLineNo">495</span>  }<a name="line.495"></a>
 <span class="sourceLineNo">496</span><a name="line.496"></a>
-<span class="sourceLineNo">497</span>  /**<a name="line.497"></a>
-<span class="sourceLineNo">498</span>   * If we make sure the block could not be cached, we will not acquire the lock<a name="line.498"></a>
-<span class="sourceLineNo">499</span>   * otherwise we will acquire lock<a name="line.499"></a>
-<span class="sourceLineNo">500</span>   */<a name="line.500"></a>
-<span class="sourceLineNo">501</span>  public boolean shouldLockOnCacheMiss(BlockType blockType) {<a name="line.501"></a>
-<span class="sourceLineNo">502</span>    if (blockType == null) {<a name="line.502"></a>
-<span class="sourceLineNo">503</span>      return true;<a name="line.503"></a>
-<span class="sourceLineNo">504</span>    }<a name="line.504"></a>
-<span class="sourceLineNo">505</span>    return shouldCacheBlockOnRead(blockType.getCategory());<a name="line.505"></a>
-<span class="sourceLineNo">506</span>  }<a name="line.506"></a>
-<span class="sourceLineNo">507</span><a name="line.507"></a>
-<span class="sourceLineNo">508</span>  @Override<a name="line.508"></a>
-<span class="sourceLineNo">509</span>  public String toString() {<a name="line.509"></a>
-<span class="sourceLineNo">510</span>    if (!isBlockCacheEnabled()) {<a name="line.510"></a>
-<span class="sourceLineNo">511</span>      return "CacheConfig:disabled";<a name="line.511"></a>
-<span class="sourceLineNo">512</span>    }<a name="line.512"></a>
-<span class="sourceLineNo">513</span>    return "blockCache=" + getBlockCache() +<a name="line.513"></a>
-<span class="sourceLineNo">514</span>      ", cacheDataOnRead=" + shouldCacheDataOnRead() +<a name="line.514"></a>
-<span class="sourceLineNo">515</span>      ", cacheDataOnWrite=" + shouldCacheDataOnWrite() +<a name="line.515"></a>
-<span class="sourceLineNo">516</span>      ", cacheIndexesOnWrite=" + shouldCacheIndexesOnWrite() +<a name="line.516"></a>
-<span class="sourceLineNo">517</span>      ", cacheBloomsOnWrite=" + shouldCacheBloomsOnWrite() +<a name="line.517"></a>
-<span class="sourceLineNo">518</span>      ", cacheEvictOnClose=" + shouldEvictOnClose() +<a name="line.518"></a>
-<span class="sourceLineNo">519</span>      ", cacheDataCompressed=" + shouldCacheDataCompressed() +<a name="line.519"></a>
-<span class="sourceLineNo">520</span>      ", prefetchOnOpen=" + shouldPrefetchOnOpen();<a name="line.520"></a>
-<span class="sourceLineNo">521</span>  }<a name="line.521"></a>
-<span class="sourceLineNo">522</span><a name="line.522"></a>
-<span class="sourceLineNo">523</span>  // Static block cache reference and methods<a name="line.523"></a>
-<span class="sourceLineNo">524</span><a name="line.524"></a>
-<span class="sourceLineNo">525</span>  /**<a name="line.525"></a>
-<span class="sourceLineNo">526</span>   * Static reference to the block cache, or null if no caching should be used<a name="line.526"></a>
-<span class="sourceLineNo">527</span>   * at all.<a name="line.527"></a>
-<span class="sourceLineNo">528</span>   */<a name="line.528"></a>
-<span class="sourceLineNo">529</span>  // Clear this if in tests you'd make more than one block cache instance.<a name="line.529"></a>
-<span class="sourceLineNo">530</span>  @VisibleForTesting<a name="line.530"></a>
-<span class="sourceLineNo">531</span>  static BlockCache GLOBAL_BLOCK_CACHE_INSTANCE;<a name="line.531"></a>
-<span class="sourceLineNo">532</span>  private static LruBlockCache ONHEAP_CACHE_INSTANCE = null;<a name="line.532"></a>
-<span class="sourceLineNo">533</span>  private static BlockCache L2_CACHE_INSTANCE = null;// Can be BucketCache or External cache.<a name="line.533"></a>
+<span class="sourceLineNo">497</span>  // Static block cache reference and methods<a name="line.497"></a>
+<span class="sourceLineNo">498</span><a name="line.498"></a>
+<span class="sourceLineNo">499</span>  /**<a name="line.499"></a>
+<span class="sourceLineNo">500</span>   * Static reference to the block cache, or null if no caching should be used<a name="line.500"></a>
+<span class="sourceLineNo">501</span>   * at all.<a name="line.501"></a>
+<span class="sourceLineNo">502</span>   */<a name="line.502"></a>
+<span class="sourceLineNo">503</span>  // Clear this if in tests you'd make more than one block cache instance.<a name="line.503"></a>
+<span class="sourceLineNo">504</span>  @VisibleForTesting<a name="line.504"></a>
+<span class="sourceLineNo">505</span>  static BlockCache GLOBAL_BLOCK_CACHE_INSTANCE;<a name="line.505"></a>
+<span class="sourceLineNo">506</span>  private static LruBlockCache ONHEAP_CACHE_INSTANCE = null;<a name="line.506"></a>
+<span class="sourceLineNo">507</span>  private static BlockCache L2_CACHE_INSTANCE = null;// Can be BucketCache or External cache.<a name="line.507"></a>
+<span class="sourceLineNo">508</span><a name="line.508"></a>
+<span class="sourceLineNo">509</span>  /** Boolean whether we have disabled the block cache entirely. */<a name="line.509"></a>
+<span class="sourceLineNo">510</span>  @VisibleForTesting<a name="line.510"></a>
+<span class="sourceLineNo">511</span>  static boolean blockCacheDisabled = false;<a name="line.511"></a>
+<span class="sourceLineNo">512</span><a name="line.512"></a>
+<span class="sourceLineNo">513</span>  /**<a name="line.513"></a>
+<span class="sourceLineNo">514</span>   * @param c Configuration to use.<a name="line.514"></a>
+<span class="sourceLineNo">515</span>   * @return An L1 instance.  Currently an instance of LruBlockCache.<a name="line.515"></a>
+<span class="sourceLineNo">516</span>   */<a name="line.516"></a>
+<span class="sourceLineNo">517</span>  public static LruBlockCache getOnHeapCache(final Configuration c) {<a name="line.517"></a>
+<span class="sourceLineNo">518</span>    return getOnHeapCacheInternal(c);<a name="line.518"></a>
+<span class="sourceLineNo">519</span>  }<a name="line.519"></a>
+<span class="sourceLineNo">520</span><a name="line.520"></a>
+<span class="sourceLineNo">521</span>  public CacheStats getOnHeapCacheStats() {<a name="line.521"></a>
+<span class="sourceLineNo">522</span>    if (ONHEAP_CACHE_INSTANCE != null) {<a name="line.522"></a>
+<span class="sourceLineNo">523</span>      return ONHEAP_CACHE_INSTANCE.getStats();<a name="line.523"></a>
+<span class="sourceLineNo">524</span>    }<a name="line.524"></a>
+<span class="sourceLineNo">525</span>    return null;<a name="line.525"></a>
+<span class="sourceLineNo">526</span>  }<a name="line.526"></a>
+<span class="sourceLineNo">527</span><a name="line.527"></a>
+<span class="sourceLineNo">528</span>  public CacheStats getL2CacheStats() {<a name="line.528"></a>
+<span class="sourceLineNo">529</span>    if (L2_CACHE_INSTANCE != null) {<a name="line.529"></a>
+<span class="sourceLineNo">530</span>      return L2_CACHE_INSTANCE.getStats();<a name="line.530"></a>
+<span class="sourceLineNo">531</span>    }<a name="line.531"></a>
+<span class="sourceLineNo">532</span>    return null;<a name="line.532"></a>
+<span class="sourceLineNo">533</span>  }<a name="line.533"></a>
 <span class="sourceLineNo">534</span><a name="line.534"></a>
-<span class="sourceLineNo">535</span>  /** Boolean whether we have disabled the block cache entirely. */<a name="line.535"></a>
-<span class="sourceLineNo">536</span>  @VisibleForTesting<a name="line.536"></a>
-<span class="sourceLineNo">537</span>  static boolean blockCacheDisabled = false;<a name="line.537"></a>
-<span class="sourceLineNo">538</span><a name="line.538"></a>
-<span class="sourceLineNo">539</span>  /**<a name="line.539"></a>
-<span class="sourceLineNo">540</span>   * @param c Configuration to use.<a name="line.540"></a>
-<span class="sourceLineNo">541</span>   * @return An L1 instance.  Currently an instance of LruBlockCache.<a name="line.541"></a>
-<span class="sourceLineNo">542</span>   */<a name="line.542"></a>
-<span class="sourceLineNo">543</span>  public static LruBlockCache getOnHeapCache(final Configuration c) {<a name="line.543"></a>
-<span class="sourceLineNo">544</span>    return getOnHeapCacheInternal(c);<a name="line.544"></a>
-<span class="sourceLineNo">545</span>  }<a name="line.545"></a>
-<span class="sourceLineNo">546</span><a name="line.546"></a>
-<span class="sourceLineNo">547</span>  public CacheStats getOnHeapCacheStats() {<a name="line.547"></a>
-<span class="sourceLineNo">548</span>    if (ONHEAP_CACHE_INSTANCE != null) {<a name="line.548"></a>
-<span class="sourceLineNo">549</span>      return ONHEAP_CACHE_INSTANCE.getStats();<a name="line.549"></a>
-<span class="sourceLineNo">550</span>    }<a name="line.550"></a>
-<span class="sourceLineNo">551</span>    return null;<a name="line.551"></a>
-<span class="sourceLineNo">552</span>  }<a name="line.552"></a>
-<span class="sourceLineNo">553</span><a name="line.553"></a>
-<span class="sourceLineNo">554</span>  public CacheStats getL2CacheStats() {<a name="line.554"></a>
-<span class="sourceLineNo">555</span>    if (L2_CACHE_INSTANCE != null) {<a name="line.555"></a>
-<span class="sourceLineNo">556</span>      return L2_CACHE_INSTANCE.getStats();<a name="line.556"></a>
-<span class="sourceLineNo">557</span>    }<a name="line.557"></a>
-<span class="sourceLineNo">558</span>    return null;<a name="line.558"></a>
-<span class="sourceLineNo">559</span>  }<a name="line.559"></a>
+<span class="sourceLineNo">535</span>  /**<a name="line.535"></a>
+<span class="sourceLineNo">536</span>   * @param c Configuration to use.<a name="line.536"></a>
+<span class="sourceLineNo">537</span>   * @return An L1 instance.  Currently an instance of LruBlockCache.<a name="line.537"></a>
+<span class="sourceLineNo">538</span>   */<a name="line.538"></a>
+<span class="sourceLineNo">539</span>  private synchronized static LruBlockCache getOnHeapCacheInternal(final Configuration c) {<a name="line.539"></a>
+<span class="sourceLineNo">540</span>    if (ONHEAP_CACHE_INSTANCE != null) {<a name="line.540"></a>
+<span class="sourceLineNo">541</span>      return ONHEAP_CACHE_INSTANCE;<a name="line.541"></a>
+<span class="sourceLineNo">542</span>    }<a name="line.542"></a>
+<span class="sourceLineNo">543</span>    final long cacheSize = MemorySizeUtil.getOnHeapCacheSize(c);<a name="line.543"></a>
+<span class="sourceLineNo">544</span>    if (cacheSize &lt; 0) {<a name="line.544"></a>
+<span class="sourceLineNo">545</span>      blockCacheDisabled = true;<a name="line.545"></a>
+<span class="sourceLineNo">546</span>    }<a name="line.546"></a>
+<span class="sourceLineNo">547</span>    if (blockCacheDisabled) return null;<a name="line.547"></a>
+<span class="sourceLineNo">548</span>    int blockSize = c.getInt(BLOCKCACHE_BLOCKSIZE_KEY, HConstants.DEFAULT_BLOCKSIZE);<a name="line.548"></a>
+<span class="sourceLineNo">549</span>    LOG.info("Allocating onheap LruBlockCache size=" +<a name="line.549"></a>
+<span class="sourceLineNo">550</span>      StringUtils.byteDesc(cacheSize) + ", blockSize=" + StringUtils.byteDesc(blockSize));<a name="line.550"></a>
+<span class="sourceLineNo">551</span>    ONHEAP_CACHE_INSTANCE = new LruBlockCache(cacheSize, blockSize, true, c);<a name="line.551"></a>
+<span class="sourceLineNo">552</span>    return ONHEAP_CACHE_INSTANCE;<a name="line.552"></a>
+<span class="sourceLineNo">553</span>  }<a name="line.553"></a>
+<span class="sourceLineNo">554</span><a name="line.554"></a>
+<span class="sourceLineNo">555</span>  private static BlockCache getExternalBlockcache(Configuration c) {<a name="line.555"></a>
+<span class="sourceLineNo">556</span>    if (LOG.isDebugEnabled()) {<a name="line.556"></a>
+<span class="sourceLineNo">557</span>      LOG.debug("Trying to use External l2 cache");<a name="line.557"></a>
+<span class="sourceLineNo">558</span>    }<a name="line.558"></a>
+<span class="sourceLineNo">559</span>    Class klass = null;<a name="line.559"></a>
 <span class="sourceLineNo">560</span><a name="line.560"></a>
-<span class="sourceLineNo">561</span>  /**<a name="line.561"></a>
-<span class="sourceLineNo">562</span>   * @param c Configuration to use.<a name="line.562"></a>
-<span class="sourceLineNo">563</span>   * @return An L1 instance.  Currently an instance of LruBlockCache.<a name="line.563"></a>
-<span class="sourceLineNo">564</span>   */<a name="line.564"></a>
-<span class="sourceLineNo">565</span>  private synchronized static LruBlockCache getOnHeapCacheInternal(final Configuration c) {<a name="line.565"></a>
-<span class="sourceLineNo">566</span>    if (ONHEAP_CACHE_INSTANCE != null) {<a name="line.566"></a>
-<span class="sourceLineNo">567</span>      return ONHEAP_CACHE_INSTANCE;<a name="line.567"></a>
-<span class="sourceLineNo">568</span>    }<a name="line.568"></a>
-<span class="sourceLineNo">569</span>    final long cacheSize = MemorySizeUtil.getOnHeapCacheSize(c);<a name="line.569"></a>
-<span class="sourceLineNo">570</span>    if (cacheSize &lt; 0) {<a name="line.570"></a>
-<span class="sourceLineNo">571</span>      blockCacheDisabled = true;<a name="line.571"></a>
-<span class="sourceLineNo">572</span>    }<a name="line.572"></a>
-<span class="sourceLineNo">573</span>    if (blockCacheDisabled) return null;<a name="line.573"></a>
-<span class="sourceLineNo">574</span>    int blockSize = c.getInt(BLOCKCACHE_BLOCKSIZE_KEY, HConstants.DEFAULT_BLOCKSIZE);<a name="line.574"></a>
-<span class="sourceLineNo">575</span>    LOG.info("Allocating onheap LruBlockCache size=" +<a name="line.575"></a>
-<span class="sourceLineNo">576</span>      StringUtils.byteDesc(cacheSize) + ", blockSize=" + StringUtils.byteDesc(blockSize));<a name="line.576"></a>
-<span class="sourceLineNo">577</span>    ONHEAP_CACHE_INSTANCE = new LruBlockCache(cacheSize, blockSize, true, c);<a name="line.577"></a>
-<span class="sourceLineNo">578</span>    return ONHEAP_CACHE_INSTANCE;<a name="line.578"></a>
-<span class="sourceLineNo">579</span>  }<a name="line.579"></a>
-<span class="sourceLineNo">580</span><a name="line.580"></a>
-<span class="sourceLineNo">581</span>  private static BlockCache getExternalBlockcache(Configuration c) {<a name="line.581"></a>
-<span class="sourceLineNo">582</span>    if (LOG.isDebugEnabled()) {<a name="line.582"></a>
-<span class="sourceLineNo">583</span>      LOG.debug("Trying to use External l2 cache");<a name="line.583"></a>
-<span class="sourceLineNo">584</span>    }<a name="line.584"></a>
-<span class="sourceLineNo">585</span>    Class klass = null;<a name="line.585"></a>
-<span class="sourceLineNo">586</span><a name="line.586"></a>
-<span class="sourceLineNo">587</span>    // Get the class, from the config. s<a name="line.587"></a>
-<span class="sourceLineNo">588</span>    try {<a name="line.588"></a>
-<span class="sourceLineNo">589</span>      klass = ExternalBlockCaches.valueOf(c.get(EXTERNAL_BLOCKCACHE_CLASS_KEY, "memcache")).clazz;<a name="line.589"></a>
-<span class="sourceLineNo">590</span>    } catch (IllegalArgumentException exception) {<a name="line.590"></a>
-<span class="sourceLineNo">591</span>      try {<a name="line.591"></a>
-<span class="sourceLineNo">592</span>        klass = c.getClass(EXTERNAL_BLOCKCACHE_CLASS_KEY, Class.forName(<a name="line.592"></a>
-<span class="sourceLineNo">593</span>            "org.apache.hadoop.hbase.io.hfile.MemcachedBlockCache"));<a name="line.593"></a>
-<span class="sourceLineNo">594</span>      } catch (ClassNotFoundException e) {<a name="line.594"></a>
-<span class="sourceLineNo">595</span>        return null;<a name="line.595"></a>
-<span class="sourceLineNo">596</span>      }<a name="line.596"></a>
-<span class="sourceLineNo">597</span>    }<a name="line.597"></a>
-<span class="sourceLineNo">598</span><a name="line.598"></a>
-<span class="sourceLineNo">599</span>    // Now try and create an instance of the block cache.<a name="line.599"></a>
-<span class="sourceLineNo">600</span>    try {<a name="line.600"></a>
-<span class="sourceLineNo">601</span>      LOG.info("Creating external block cache of type: " + klass);<a name="line.601"></a>
-<span class="sourceLineNo">602</span>      return (BlockCache) ReflectionUtils.newInstance(klass, c);<a name="line.602"></a>
-<span class="sourceLineNo">603</span>    } catch (Exception e) {<a name="line.603"></a>
-<span class="sourceLineNo">604</span>      LOG.warn("Error creating external block cache", e);<a name="line.604"></a>
-<span class="sourceLineNo">605</span>    }<a name="line.605"></a>
-<span class="sourceLineNo">606</span>    return null;<a name="line.606"></a>
-<span class="sourceLineNo">607</span><a name="line.607"></a>
-<span class="sourceLineNo">608</span>  }<a name="line.608"></a>
-<span class="sourceLineNo">609</span><a name="line.609"></a>
-<span class="sourceLineNo">610</span>  @VisibleForTesting<a name="line.610"></a>
-<span class="sourceLineNo">611</span>  static BucketCache getBucketCache(Configuration c) {<a name="line.611"></a>
-<span class="sourceLineNo">612</span>    // Check for L2.  ioengine name must be non-null.<a name="line.612"></a>
-<span class="sourceLineNo">613</span>    String bucketCacheIOEngineName = c.get(BUCKET_CACHE_IOENGINE_KEY, null);<a name="line.613"></a>
-<span class="sourceLineNo">614</span>    if (bucketCacheIOEngineName == null || bucketCacheIOEngineName.length() &lt;= 0) return null;<a name="line.614"></a>
-<span class="sourceLineNo">615</span><a name="line.615"></a>
-<span class="sourceLineNo">616</span>    int blockSize = c.getInt(BLOCKCACHE_BLOCKSIZE_KEY, HConstants.DEFAULT_BLOCKSIZE);<a name="line.616"></a>
-<span class="sourceLineNo">617</span>    final long bucketCacheSize = MemorySizeUtil.getBucketCacheSize(c);<a name="line.617"></a>
-<span class="sourceLineNo">618</span>    if (bucketCacheSize &lt;= 0) {<a name="line.618"></a>
-<span class="sourceLineNo">619</span>      throw new IllegalStateException("bucketCacheSize &lt;= 0; Check " +<a name="line.619"></a>
-<span class="sourceLineNo">620</span>        BUCKET_CACHE_SIZE_KEY + " setting and/or server java heap size");<a name="line.620"></a>
-<span class="sourceLineNo">621</span>    }<a name="line.621"></a>
-<span class="sourceLineNo">622</span>    if (c.get("hbase.bucketcache.percentage.in.combinedcache") != null) {<a name="line.622"></a>
-<span class="sourceLineNo">623</span>      LOG.warn("Configuration 'hbase.bucketcache.percentage.in.combinedcache' is no longer "<a name="line.623"></a>
-<span class="sourceLineNo">624</span>          + "respected. See comments in http://hbase.apache.org/book.html#_changes_of_note");<a name="line.624"></a>
-<span class="sourceLineNo">625</span>    }<a name="line.625"></a>
-<span class="sourceLineNo">626</span>    int writerThreads = c.getInt(BUCKET_CACHE_WRITER_THREADS_KEY,<a name="line.626"></a>
-<span class="sourceLineNo">627</span>      DEFAULT_BUCKET_CACHE_WRITER_THREADS);<a name="line.627"></a>
-<span class="sourceLineNo">628</span>    int writerQueueLen = c.getInt(BUCKET_CACHE_WRITER_QUEUE_KEY,<a name="line.628"></a>
-<span class="sourceLineNo">629</span>      DEFAULT_BUCKET_CACHE_WRITER_QUEUE);<a name="line.629"></a>
-<span class="sourceLineNo">630</span>    String persistentPath = c.get(BUCKET_CACHE_PERSISTENT_PATH_KEY);<a name="line.630"></a>
-<span class="sourceLineNo">631</span>    String[] configuredBucketSizes = c.getStrings(BUCKET_CACHE_BUCKETS_KEY);<a name="line.631"></a>
-<span class="sourceLineNo">632</span>    int [] bucketSizes = null;<a name="line.632"></a>
-<span class="sourceLineNo">633</span>    if (configuredBucketSizes != null) {<a name="line.633"></a>
-<span class="sourceLineNo">634</span>      bucketSizes = new int[configuredBucketSizes.length];<a name="line.634"></a>
-<span class="sourceLineNo">635</span>      for (int i = 0; i &lt; configuredBucketSizes.length; i++) {<a name="line.635"></a>
-<span class="sourceLineNo">636</span>        int bucketSize = Integer.parseInt(configuredBucketSizes[i].trim());<a name="line.636"></a>
-<span class="sourceLineNo">637</span>        if (bucketSize % 256 != 0) {<a name="line.637"></a>
-<span class="sourceLineNo">638</span>          // We need all the bucket sizes to be multiples of 256. Having all the configured bucket<a name="line.638"></a>
-<span class="sourceLineNo">639</span>          // sizes to be multiples of 256 will ensure that the block offsets within buckets,<a name="line.639"></a>
-<span class="sourceLineNo">640</span>          // that are calculated, will also be multiples of 256.<a name="line.640"></a>
-<span class="sourceLineNo">641</span>          // See BucketEntry where offset to each block is represented using 5 bytes (instead of 8<a name="line.641"></a>
-<span class="sourceLineNo">642</span>          // bytes long). We would like to save heap overhead as less as possible.<a name="line.642"></a>
-<span class="sourceLineNo">643</span>          throw new IllegalArgumentException("Illegal value: " + bucketSize + " configured for '"<a name="line.643"></a>
-<span class="sourceLineNo">644</span>              + BUCKET_CACHE_BUCKETS_KEY + "'. All bucket sizes to be multiples of 256");<a name="line.644"></a>
-<span class="sourceLineNo">645</span>        }<a name="line.645"></a>
-<span class="sourceLineNo">646</span>        bucketSizes[i] = bucketSize;<a name="line.646"></a>
-<span class="sourceLineNo">647</span>      }<a name="line.647"></a>
+<span class="sourceLineNo">561</span>    // Get the class, from the config. s<a name="line.561"></a>
+<span class="sourceLineNo">562</span>    try {<a name="line.562"></a>
+<span class="sourceLineNo">563</span>      klass = ExternalBlockCaches.valueOf(c.get(EXTERNAL_BLOCKCACHE_CLASS_KEY, "memcache")).clazz;<a name="line.563"></a>
+<span class="sourceLineNo">564</span>    } catch (IllegalArgumentException exception) {<a name="line.564"></a>
+<span class="sourceLineNo">565</span>      try {<a name="line.565"></a>
+<span class="sourceLineNo">566</span>        klass = c.getClass(EXTERNAL_BLOCKCACHE_CLASS_KEY, Class.forName(<a name="line.566"></a>
+<span class="sourceLineNo">567</span>            "org.apache.hadoop.hbase.io.hfile.MemcachedBlockCache"));<a name="line.567"></a>
+<span class="sourceLineNo">568</span>      } catch (ClassNotFoundException e) {<a name="line.568"></a>
+<span class="sourceLineNo">569</span>        return null;<a name="line.569"></a>
+<span class="sourceLineNo">570</span>      }<a name="line.570"></a>
+<span class="sourceLineNo">571</span>    }<a name="line.571"></a>
+<span class="sourceLineNo">572</span><a name="line.572"></a>
+<span class="sourceLineNo">573</span>    // Now try and create an instance of the block cache.<a name="line.573"></a>
+<span class="sourceLineNo">574</span>    try {<a name="line.574"></a>
+<span class="sourceLineNo">575</span>      LOG.info("Creating external block cache of type: " + klass);<a name="line.575"></a>
+<span class="sourceLineNo">576</span>      return (BlockCache) ReflectionUtils.newInstance(klass, c);<a name="line.576"></a>
+<span class="sourceLineNo">577</span>    } catch (Exception e) {<a name="line.577"></a>
+<span class="sourceLineNo">578</span>      LOG.warn("Error creating external block cache", e);<a name="line.578"></a>
+<span class="sourceLineNo">579</span>    }<a name="line.579"></a>
+<span class="sourceLineNo">580</span>    return null;<a name="line.580"></a>
+<span class="sourceLineNo">581</span><a name="line.581"></a>
+<span class="sourceLineNo">582</span>  }<a name="line.582"></a>
+<span class="sourceLineNo">583</span><a name="line.583"></a>
+<span class="sourceLineNo">584</span>  @VisibleForTesting<a name="line.584"></a>
+<span class="sourceLineNo">585</span>  static BucketCache getBucketCache(Configuration c) {<a name="line.585"></a>
+<span class="sourceLineNo">586</span>    // Check for L2.  ioengine name must be non-null.<a name="line.586"></a>
+<span class="sourceLineNo">587</span>    String bucketCacheIOEngineName = c.get(BUCKET_CACHE_IOENGINE_KEY, null);<a name="line.587"></a>
+<span class="sourceLineNo">588</span>    if (bucketCacheIOEngineName == null || bucketCacheIOEngineName.length() &lt;= 0) return null;<a name="line.588"></a>
+<span class="sourceLineNo">589</span><a name="line.589"></a>
+<span class="sourceLineNo">590</span>    int blockSize = c.getInt(BLOCKCACHE_BLOCKSIZE_KEY, HConstants.DEFAULT_BLOCKSIZE);<a name="line.590"></a>
+<span class="sourceLineNo">591</span>    final long bucketCacheSize = MemorySizeUtil.getBucketCacheSize(c);<a name="line.591"></a>
+<span class="sourceLineNo">592</span>    if (bucketCacheSize &lt;= 0) {<a name="line.592"></a>
+<span class="sourceLineNo">593</span>      throw new IllegalStateException("bucketCacheSize &lt;= 0; Check " +<a name="line.593"></a>
+<span class="sourceLineNo">594</span>        BUCKET_CACHE_SIZE_KEY + " setting and/or server java heap size");<a name="line.594"></a>
+<span class="sourceLineNo">595</span>    }<a name="line.595"></a>
+<span class="sourceLineNo">596</span>    if (c.get("hbase.bucketcache.percentage.in.combinedcache") != null) {<a name="line.596"></a>
+<span class="sourceLineNo">597</span>      LOG.warn("Configuration 'hbase.bucketcache.percentage.in.combinedcache' is no longer "<a name="line.597"></a>
+<span class="sourceLineNo">598</span>          + "respected. See comments in http://hbase.apache.org/book.html#_changes_of_note");<a name="line.598"></a>
+<span class="sourceLineNo">599</span>    }<a name="line.599"></a>
+<span class="sourceLineNo">600</span>    int writerThreads = c.getInt(BUCKET_CACHE_WRITER_THREADS_KEY,<a name="line.600"></a>
+<span class="sourceLineNo">601</span>      DEFAULT_BUCKET_CACHE_WRITER_THREADS);<a name="line.601"></a>
+<span class="sourceLineNo">602</span>    int writerQueueLen = c.getInt(BUCKET_CACHE_WRITER_QUEUE_KEY,<a name="line.602"></a>
+<span class="sourceLineNo">603</span>      DEFAULT_BUCKET_CACHE_WRITER_QUEUE);<a name="line.603"></a>
+<span class="sourceLineNo">604</span>    String persistentPath = c.get(BUCKET_CACHE_PERSISTENT_PATH_KEY);<a name="line.604"></a>
+<span class="sourceLineNo">605</span>    String[] configuredBucketSizes = c.getStrings(BUCKET_CACHE_BUCKETS_KEY);<a name="line.605"></a>
+<span class="sourceLineNo">606</span>    int [] bucketSizes = null;<a name="line.606"></a>
+<span class="sourceLineNo">607</span>    if (configuredBucketSizes != null) {<a name="line.607"></a>
+<span class="sourceLineNo">608</span>      bucketSizes = new int[configuredBucketSizes.length];<a name="line.608"></a>
+<span class="sourceLineNo">609</span>      for (int i = 0; i &lt; configuredBucketSizes.length; i++) {<a name="line.609"></a>
+<span class="sourceLineNo">610</span>        int bucketSize = Integer.parseInt(configuredBucketSizes[i].trim());<a name="line.610"></a>
+<span class="sourceLineNo">611</span>        if (bucketSize % 256 != 0) {<a name="line.611"></a>
+<span class="sourceLineNo">612</span>          // We need all the bucket sizes to be multiples of 256. Having all the configured bucket<a name="line.612"></a>
+<span class="sourceLineNo">613</span>          // sizes to be multiples of 256 will ensure that the block offsets within buckets,<a name="line.613"></a>
+<span class="sourceLineNo">614</span>          // that are calculated, will also be multiples of 256.<a name="line.614"></a>
+<span class="sourceLineNo">615</span>          // See BucketEntry where offset to each block is represented using 5 bytes (instead of 8<a name="line.615"></a>
+<span class="sourceLineNo">616</span>          // bytes long). We would like to save heap overhead as less as possible.<a name="line.616"></a>
+<span class="sourceLineNo">617</span>          throw new IllegalArgumentException("Illegal value: " + bucketSize + " configured for '"<a name="line.617"></a>
+<span class="sourceLineNo">618</span>              + BUCKET_CACHE_BUCKETS_KEY + "'. All bucket sizes to be multiples of 256");<a name="line.618"></a>
+<span class="sourceLineNo">619</span>        }<a name="line.619"></a>
+<span class="sourceLineNo">620</span>        bucketSizes[i] = bucketSize;<a name="line.620"></a>
+<span class="sourceLineNo">621</span>      }<a name="line.621"></a>
+<span class="sourceLineNo">622</span>    }<a name="line.622"></a>
+<span class="sourceLineNo">623</span>    BucketCache bucketCache = null;<a name="line.623"></a>
+<span class="sourceLineNo">624</span>    try {<a name="line.624"></a>
+<span class="sourceLineNo">625</span>      int ioErrorsTolerationDuration = c.getInt(<a name="line.625"></a>
+<span class="sourceLineNo">626</span>        "hbase.bucketcache.ioengine.errors.tolerated.duration",<a name="line.626"></a>
+<span class="sourceLineNo">627</span>        BucketCache.DEFAULT_ERROR_TOLERATION_DURATION);<a name="line.627"></a>
+<span class="sourceLineNo">628</span>      // Bucket cache logs its stats on creation internal to the constructor.<a name="line.628"></a>
+<span class="sourceLineNo">629</span>      bucketCache = new BucketCache(bucketCacheIOEngineName,<a name="line.629"></a>
+<span class="sourceLineNo">630</span>        bucketCacheSize, blockSize, bucketSizes, writerThreads, writerQueueLen, persistentPath,<a name="line.630"></a>
+<span class="sourceLineNo">631</span>        ioErrorsTolerationDuration, c);<a name="line.631"></a>
+<span class="sourceLineNo">632</span>    } catch (IOException ioex) {<a name="line.632"></a>
+<span class="sourceLineNo">633</span>      LOG.error("Can't instantiate bucket cache", ioex); throw new RuntimeException(ioex);<a name="line.633"></a>
+<span class="sourceLineNo">634</span>    }<a name="line.634"></a>
+<span class="sourceLineNo">635</span>    return bucketCache;<a name="line.635"></a>
+<span class="sourceLineNo">636</span>  }<a name="line.636"></a>
+<span class="sourceLineNo">637</span><a name="line.637"></a>
+<span class="sourceLineNo">638</span>  /**<a name="line.638"></a>
+<span class="sourceLineNo">639</span>   * Returns the block cache or &lt;code&gt;null&lt;/code&gt; in case none should be used.<a name="line.639"></a>
+<span class="sourceLineNo">640</span>   * Sets GLOBAL_BLOCK_CACHE_INSTANCE<a name="line.640"></a>
+<span class="sourceLineNo">641</span>   *<a name="line.641"></a>
+<span class="sourceLineNo">642</span>   * @param conf  The current configuration.<a name="line.642"></a>
+<span class="sourceLineNo">643</span>   * @return The block cache or &lt;code&gt;null&lt;/code&gt;.<a name="line.643"></a>
+<span class="sourceLineNo">644</span>   */<a name="line.644"></a>
+<span class="sourceLineNo">645</span>  public static synchronized BlockCache instantiateBlockCache(Configuration conf) {<a name="line.645"></a>
+<span class="sourceLineNo">646</span>    if (GLOBAL_BLOCK_CACHE_INSTANCE != null) {<a name="line.646"></a>
+<span class="sourceLineNo">647</span>      return GLOBAL_BLOCK_CACHE_INSTANCE;<a name="line.647"></a>
 <span class="sourceLineNo">648</span>    }<a name="line.648"></a>
-<span class="sourceLineNo">649</span>    BucketCache bucketCache = null;<a name="line.649"></a>
-<span class="sourceLineNo">650</span>    try {<a name="line.650"></a>
-<span class="sourceLineNo">651</span>      int ioErrorsTolerationDuration = c.getInt(<a name="line.651"></a>
-<span class="sourceLineNo">652</span>        "hbase.bucketcache.ioengine.errors.tolerated.duration",<a name="line.652"></a>
-<span class="sourceLineNo">653</span>        BucketCache.DEFAULT_ERROR_TOLERATION_DURATION);<a name="line.653"></a>
-<span class="sourceLineNo">654</span>      // Bucket cache logs its stats on creation internal to the constructor.<a name="line.654"></a>
-<span class="sourceLineNo">655</span>      bucketCache = new BucketCache(bucketCacheIOEngineName,<a name="line.655"></a>
-<span class="sourceLineNo">656</span>        bucketCacheSize, blockSize, bucketSizes, writerThreads, writerQueueLen, persistentPath,<a name="line.656"></a>
-<span class="sourceLineNo">657</span>        ioErrorsTolerationDuration, c);<a name="line.657"></a>
-<span class="sourceLineNo">658</span>    } catch (IOException ioex) {<a name="line.658"></a>
-<span class="sourceLineNo">659</span>      LOG.error("Can't instantiate bucket cache", ioex); throw new RuntimeException(ioex);<a name="line.659"></a>
-<span class="sourceLineNo">660</span>    }<a name="line.660"></a>
-<span class="sourceLineNo">661</span>    return bucketCache;<a name="line.661"></a>
-<span class="sourceLineNo">662</span>  }<a name="line.662"></a>
-<span class="sourceLineNo">663</span><a name="line.663"></a>
-<span class="sourceLineNo">664</span>  /**<a name="line.664"></a>
-<span class="sourceLineNo">665</span>   * Returns the block cache or &lt;code&gt;null&lt;/code&gt; in case none should be used.<a name="line.665"></a>
-<span class="sourceLineNo">666</span>   * Sets GLOBAL_BLOCK_CACHE_INSTANCE<a name="line.666"></a>
-<span class="sourceLineNo">667</span>   *<a name="line.667"></a>
-<span class="sourceLineNo">668</span>   * @param conf  The current configuration.<a name="line.668"></a>
-<span class="sourceLineNo">669</span>   * @return The block cache or &lt;code&gt;null&lt;/code&gt;.<a name="line.669"></a>
-<span class="sourceLineNo">670</span>   */<a name="line.670"></a>
-<span class="sourceLineNo">671</span>  public static synchronized BlockCache instantiateBlockCache(Configuration conf) {<a name="line.671"></a>
-<span class="sourceLineNo">672</span>    if (GLOBAL_BLOCK_CACHE_INSTANCE != null) return GLOBAL_BLOCK_CACHE_INSTANCE;<a name="line.672"></a>
-<span class="sourceLineNo">673</span>    if (blockCacheDisabled) return null;<a name="line.673"></a>
-<span class="sourceLineNo">674</span>    LruBlockCache onHeapCache = getOnHeapCacheInternal(conf);<a name="line.674"></a>
-<span class="sourceLineNo">675</span>    // blockCacheDisabled is set as a side-effect of getL1Internal(), so check it again after the<a name="line.675"></a>
-<span class="sourceLineNo">676</span>    // call.<a name="line.676"></a>
-<span class="sourceLineNo">677</span>    if (blockCacheDisabled) return null;<a name="line.677"></a>
-<span class="sourceLineNo">678</span>    boolean useExternal = conf.getBoolean(EXTERNAL_BLOCKCACHE_KEY, EXTERNAL_BLOCKCACHE_DEFAULT);<a name="line.678"></a>
-<span class="sourceLineNo">679</span>    if (useExternal) {<a name="line.679"></a>
-<span class="sourceLineNo">680</span>      L2_CACHE_INSTANCE = getExternalBlockcache(conf);<a name="line.680"></a>
-<span class="sourceLineNo">681</span>      GLOBAL_BLOCK_CACHE_INSTANCE = L2_CACHE_INSTANCE == null ? onHeapCache<a name="line.681"></a>
-<span class="sourceLineNo">682</span>          : new InclusiveCombinedBlockCache(onHeapCache, L2_CACHE_INSTANCE);<a name="line.682"></a>
-<span class="sourceLineNo">683</span>    } else {<a name="line.683"></a>
-<span class="sourceLineNo">684</span>      // otherwise use the bucket cache.<a name="line.684"></a>
-<span class="sourceLineNo">685</span>      L2_CACHE_INSTANCE = getBucketCache(conf);<a name="line.685"></a>
-<span class="sourceLineNo">686</span>      if (!conf.getBoolean("hbase.bucketcache.combinedcache.enabled", true)) {<a name="line.686"></a>
-<span class="sourceLineNo">687</span>        // Non combined mode is off from 2.0<a name="line.687"></a>
-<span class="sourceLineNo">688</span>        LOG.warn(<a name="line.688"></a>
-<span class="sourceLineNo">689</span>            "From HBase 2.0 onwards only combined mode of LRU cache and bucket cache is available");<a name="line.689"></a>
-<span class="sourceLineNo">690</span>      }<a name="line.690"></a>
-<span class="sourceLineNo">691</span>      GLOBAL_BLOCK_CACHE_INSTANCE = L2_CACHE_INSTANCE == null ? onHeapCache<a name="line.691"></a>
-<span class="sourceLineNo">692</span>          : new CombinedBlockCache(onHeapCache, L2_CACHE_INSTANCE);<a name="line.692"></a>
-<span class="sourceLineNo">693</span>    }<a name="line.693"></a>
-<span class="sourceLineNo">694</span>    return GLOBAL_BLOCK_CACHE_INSTANCE;<a name="line.694"></a>
-<span class="sourceLineNo">695</span>  }<a name="line.695"></a>
-<span class="sourceLineNo">696</span><a name="line.696"></a>
-<span class="sourceLineNo">697</span>  // Supposed to use only from tests. Some tests want to reinit the Global block cache instance<a name="line.697"></a>
-<span class="sourceLineNo">698</span>  @VisibleForTesting<a name="line.698"></a>
-<span class="sourceLineNo">699</span>  static synchronized void clearGlobalInstances() {<a name="line.699"></a>
-<span class="sourceLineNo">700</span>    ONHEAP_CACHE_INSTANCE = null;<a name="line.700"></a>
-<span class="sourceLineNo">701</span>    L2_CACHE_INSTANCE = null;<a name="line.701"></a>
-<span class="sourceLineNo">702</span>    GLOBAL_BLOCK_CACHE_INSTANCE = null;<a name="line.702"></a>
-<span class="sourceLineNo">703</span>  }<a name="line.703"></a>
-<span class="sourceLineNo">704</span>}<a name="line.704"></a>
+<span class="sourceLineNo">649</span>    if (blockCacheDisabled) {<a name="line.649"></a>
+<span class="sourceLineNo">650</span>      return null;<a name="line.650"></a>
+<span class="sourceLineNo">651</span>    }<a name="line.651"></a>
+<span class="sourceLineNo">652</span>    LruBlockCache onHeapCache = getOnHeapCacheInternal(conf);<a name="line.652"></a>
+<span class="sourceLineNo">653</span>    // blockCacheDisabled is set as a side-effect of getL1Internal(), so check it again after the<a name="line.653"></a>
+<span class="sourceLineNo">654</span>    // call.<a name="line.654"></a>
+<span class="sourceLineNo">655</span>    if (blockCacheDisabled) {<a name="line.655"></a>
+<span class="sourceLineNo">656</span>      return null;<a name="line.656"></a>
+<span class="sourceLineNo">657</span>    }<a name="line.657"></a>
+<span class="sourceLineNo">658</span>    boolean useExternal = conf.getBoolean(EXTERNAL_BLOCKCACHE_KEY, EXTERNAL_BLOCKCACHE_DEFAULT);<a name="line.658"></a>
+<span class="sourceLineNo">659</span>    if (useExternal) {<a name="line.659"></a>
+<span class="sourceLineNo">660</span>      L2_CACHE_INSTANCE = getExternalBlockcache(conf);<a name="line.660"></a>
+<span class="sourceLineNo">661</span>      GLOBAL_BLOCK_CACHE_INSTANCE = L2_CACHE_INSTANCE == null ? onHeapCache<a name="line.661"></a>
+<span class="sourceLineNo">662</span>          : new InclusiveCombinedBlockCache(onHeapCache, L2_CACHE_INSTANCE);<a name="line.662"></a>
+<span class="sourceLineNo">663</span>    } else {<a name="line.663"></a>
+<span class="sourceLineNo">664</span>      // otherwise use the bucket cache.<a name="line.664"></a>
+<span class="sourceLineNo">665</span>      L2_CACHE_INSTANCE = getBucketCache(conf);<a name="line.665"></a>
+<span class="sourceLineNo">666</span>      if (!conf.getBoolean("hbase.bucketcache.combinedcache.enabled", true)) {<a name="line.666"></a>
+<span class="sourceLineNo">667</span>        // Non combined mode is off from 2.0<a name="line.667"></a>
+<span class="sourceLineNo">668</span>        LOG.warn(<a name="line.668"></a>
+<span class="sourceLineNo">669</span>            "From HBase 2.0 onwards only combined mode of LRU cache and bucket cache is available");<a name="line.669"></a>
+<span class="sourceLineNo">670</span>      }<a name="line.670"></a>
+<span class="sourceLineNo">671</span>      GLOBAL_BLOCK_CACHE_INSTANCE = L2_CACHE_INSTANCE == null ? onHeapCache<a name="line.671"></a>
+<span class="sourceLineNo">672</span>          : new CombinedBlockCache(onHeapCache, L2_CACHE_INSTANCE);<a name="line.672"></a>
+<span class="sourceLineNo">673</span>    }<a name="line.673"></a>
+<span class="sourceLineNo">674</span>    return GLOBAL_BLOCK_CACHE_INSTANCE;<a name="line.674"></a>
+<span class="sourceLineNo">675</span>  }<a name="line.675"></a>
+<span class="sourceLineNo">676</span><a name="line.676"></a>
+<span class="sourceLineNo">677</span>  // Supposed to use only from tests. Some tests want to reinit the Global block cache instance<a name="line.677"></a>
+<span class="sourceLineNo">678</span>  @VisibleForTesting<a name="line.678"></a>
+<span class="sourceLineNo">679</span>  static synchronized void clearGlobalInstances() {<a name="line.679"></a>
+<span class="sourceLineNo">680</span>    ONHEAP_CACHE_INSTANCE = null;<a name="line.680"></a>
+<span class="sourceLineNo">681</span>    L2_CACHE_INSTANCE = null;<a name="line.681"></a>
+<span class="sourceLineNo">682</span>    GLOBAL_BLOCK_CACHE_INSTANCE = null;<a name="line.682"></a>
+<span class="sourceLineNo">683</span>

<TRUNCATED>

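The CacheConfig#getBucketCache hunk above rejects any configured bucket size that is not a multiple of 256, so that block offsets computed inside a bucket stay 256-byte aligned and fit the 5-byte offset representation mentioned in the BucketEntry comment. The following standalone Java sketch (not HBase code; the class name, method name, and example sizes are invented for illustration) mirrors that validation under the assumption that the sizes arrive as a comma-separated string, as they would for the BUCKET_CACHE_BUCKETS_KEY setting read via c.getStrings(...):

    // Standalone sketch mirroring the bucket-size check shown in the diff above.
    public final class BucketSizeCheck {

      // Parse a comma-separated list of bucket sizes; every size must be a multiple of 256.
      static int[] parseBucketSizes(String configured) {
        if (configured == null || configured.isEmpty()) {
          return null; // no explicit sizes configured; the cache would use its defaults
        }
        String[] parts = configured.split(",");
        int[] sizes = new int[parts.length];
        for (int i = 0; i < parts.length; i++) {
          int size = Integer.parseInt(parts[i].trim());
          if (size % 256 != 0) {
            // Same rule as CacheConfig#getBucketCache: reject non-256-aligned sizes.
            throw new IllegalArgumentException(
                "Illegal bucket size " + size + "; all bucket sizes must be multiples of 256");
          }
          sizes[i] = size;
        }
        return sizes;
      }

      public static void main(String[] args) {
        // 5120, 9216 and 33792 bytes are all multiples of 256 and pass the check.
        int[] ok = parseBucketSizes("5120,9216,33792");
        System.out.println(java.util.Arrays.toString(ok));
      }
    }

Running main prints [5120, 9216, 33792]; a value such as 5000 would throw IllegalArgumentException, matching the behaviour of the check in the hunk above.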
[17/26] hbase-site git commit: Published site at 6f15cecaed2f1f76bfe1880b7c578ed369daa5d5.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
index 6369c27..ea05301 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
@@ -603,3251 +603,3256 @@
 <span class="sourceLineNo">595</span>      // init superusers and add the server principal (if using security)<a name="line.595"></a>
 <span class="sourceLineNo">596</span>      // or process owner as default super user.<a name="line.596"></a>
 <span class="sourceLineNo">597</span>      Superusers.initialize(conf);<a name="line.597"></a>
-<span class="sourceLineNo">598</span><a name="line.598"></a>
-<span class="sourceLineNo">599</span>      regionServerAccounting = new RegionServerAccounting(conf);<a name="line.599"></a>
+<span class="sourceLineNo">598</span>      regionServerAccounting = new RegionServerAccounting(conf);<a name="line.598"></a>
+<span class="sourceLineNo">599</span><a name="line.599"></a>
 <span class="sourceLineNo">600</span>      boolean isMasterNotCarryTable =<a name="line.600"></a>
 <span class="sourceLineNo">601</span>          this instanceof HMaster &amp;&amp; !LoadBalancer.isTablesOnMaster(conf);<a name="line.601"></a>
-<span class="sourceLineNo">602</span>      cacheConfig = new CacheConfig(conf, !isMasterNotCarryTable);<a name="line.602"></a>
-<span class="sourceLineNo">603</span>      mobCacheConfig = new MobCacheConfig(conf, !isMasterNotCarryTable);<a name="line.603"></a>
-<span class="sourceLineNo">604</span>      uncaughtExceptionHandler = new UncaughtExceptionHandler() {<a name="line.604"></a>
-<span class="sourceLineNo">605</span>        @Override<a name="line.605"></a>
-<span class="sourceLineNo">606</span>        public void uncaughtException(Thread t, Throwable e) {<a name="line.606"></a>
-<span class="sourceLineNo">607</span>          abort("Uncaught exception in executorService thread " + t.getName(), e);<a name="line.607"></a>
-<span class="sourceLineNo">608</span>        }<a name="line.608"></a>
-<span class="sourceLineNo">609</span>      };<a name="line.609"></a>
-<span class="sourceLineNo">610</span><a name="line.610"></a>
-<span class="sourceLineNo">611</span>      initializeFileSystem();<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      spanReceiverHost = SpanReceiverHost.getInstance(getConfiguration());<a name="line.612"></a>
-<span class="sourceLineNo">613</span><a name="line.613"></a>
-<span class="sourceLineNo">614</span>      this.configurationManager = new ConfigurationManager();<a name="line.614"></a>
-<span class="sourceLineNo">615</span>      setupWindows(getConfiguration(), getConfigurationManager());<a name="line.615"></a>
-<span class="sourceLineNo">616</span><a name="line.616"></a>
-<span class="sourceLineNo">617</span>      // Some unit tests don't need a cluster, so no zookeeper at all<a name="line.617"></a>
-<span class="sourceLineNo">618</span>      if (!conf.getBoolean("hbase.testing.nocluster", false)) {<a name="line.618"></a>
-<span class="sourceLineNo">619</span>        // Open connection to zookeeper and set primary watcher<a name="line.619"></a>
-<span class="sourceLineNo">620</span>        zooKeeper = new ZKWatcher(conf, getProcessName() + ":" +<a name="line.620"></a>
-<span class="sourceLineNo">621</span>          rpcServices.isa.getPort(), this, canCreateBaseZNode());<a name="line.621"></a>
-<span class="sourceLineNo">622</span>        // If no master in cluster, skip trying to track one or look for a cluster status.<a name="line.622"></a>
-<span class="sourceLineNo">623</span>        if (!this.masterless) {<a name="line.623"></a>
-<span class="sourceLineNo">624</span>          this.csm = new ZkCoordinatedStateManager(this);<a name="line.624"></a>
-<span class="sourceLineNo">625</span><a name="line.625"></a>
-<span class="sourceLineNo">626</span>          masterAddressTracker = new MasterAddressTracker(getZooKeeper(), this);<a name="line.626"></a>
-<span class="sourceLineNo">627</span>          masterAddressTracker.start();<a name="line.627"></a>
-<span class="sourceLineNo">628</span><a name="line.628"></a>
-<span class="sourceLineNo">629</span>          clusterStatusTracker = new ClusterStatusTracker(zooKeeper, this);<a name="line.629"></a>
-<span class="sourceLineNo">630</span>          clusterStatusTracker.start();<a name="line.630"></a>
-<span class="sourceLineNo">631</span>        } else {<a name="line.631"></a>
-<span class="sourceLineNo">632</span>          masterAddressTracker = null;<a name="line.632"></a>
-<span class="sourceLineNo">633</span>          clusterStatusTracker = null;<a name="line.633"></a>
-<span class="sourceLineNo">634</span>        }<a name="line.634"></a>
-<span class="sourceLineNo">635</span>      } else {<a name="line.635"></a>
-<span class="sourceLineNo">636</span>        zooKeeper = null;<a name="line.636"></a>
-<span class="sourceLineNo">637</span>        masterAddressTracker = null;<a name="line.637"></a>
-<span class="sourceLineNo">638</span>        clusterStatusTracker = null;<a name="line.638"></a>
-<span class="sourceLineNo">639</span>      }<a name="line.639"></a>
-<span class="sourceLineNo">640</span>      this.rpcServices.start(zooKeeper);<a name="line.640"></a>
-<span class="sourceLineNo">641</span>      // This violates 'no starting stuff in Constructor' but Master depends on the below chore<a name="line.641"></a>
-<span class="sourceLineNo">642</span>      // and executor being created and takes a different startup route. Lots of overlap between HRS<a name="line.642"></a>
-<span class="sourceLineNo">643</span>      // and M (An M IS A HRS now). Need to refactor so less duplication between M and its super<a name="line.643"></a>
-<span class="sourceLineNo">644</span>      // Master expects Constructor to put up web servers. Ugh.<a name="line.644"></a>
-<span class="sourceLineNo">645</span>      // class HRS. TODO.<a name="line.645"></a>
-<span class="sourceLineNo">646</span>      this.choreService = new ChoreService(getName(), true);<a name="line.646"></a>
-<span class="sourceLineNo">647</span>      this.executorService = new ExecutorService(getName());<a name="line.647"></a>
-<span class="sourceLineNo">648</span>      putUpWebUI();<a name="line.648"></a>
-<span class="sourceLineNo">649</span>    } catch (Throwable t) {<a name="line.649"></a>
-<span class="sourceLineNo">650</span>      // Make sure we log the exception. HRegionServer is often started via reflection and the<a name="line.650"></a>
-<span class="sourceLineNo">651</span>      // cause of failed startup is lost.<a name="line.651"></a>
-<span class="sourceLineNo">652</span>      LOG.error("Failed construction RegionServer", t);<a name="line.652"></a>
-<span class="sourceLineNo">653</span>      throw t;<a name="line.653"></a>
-<span class="sourceLineNo">654</span>    }<a name="line.654"></a>
-<span class="sourceLineNo">655</span>  }<a name="line.655"></a>
-<span class="sourceLineNo">656</span><a name="line.656"></a>
-<span class="sourceLineNo">657</span>  // HMaster should override this method to load the specific config for master<a name="line.657"></a>
-<span class="sourceLineNo">658</span>  protected String getUseThisHostnameInstead(Configuration conf) throws IOException {<a name="line.658"></a>
-<span class="sourceLineNo">659</span>    String hostname = conf.get(RS_HOSTNAME_KEY);<a name="line.659"></a>
-<span class="sourceLineNo">660</span>    if (conf.getBoolean(RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY, false)) {<a name="line.660"></a>
-<span class="sourceLineNo">661</span>      if (!StringUtils.isBlank(hostname)) {<a name="line.661"></a>
-<span class="sourceLineNo">662</span>        String msg = RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY + " and " + RS_HOSTNAME_KEY +<a name="line.662"></a>
-<span class="sourceLineNo">663</span>          " are mutually exclusive. Do not set " + RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY +<a name="line.663"></a>
-<span class="sourceLineNo">664</span>          " to true while " + RS_HOSTNAME_KEY + " is used";<a name="line.664"></a>
-<span class="sourceLineNo">665</span>        throw new IOException(msg);<a name="line.665"></a>
-<span class="sourceLineNo">666</span>      } else {<a name="line.666"></a>
-<span class="sourceLineNo">667</span>        return rpcServices.isa.getHostName();<a name="line.667"></a>
-<span class="sourceLineNo">668</span>      }<a name="line.668"></a>
-<span class="sourceLineNo">669</span>    } else {<a name="line.669"></a>
-<span class="sourceLineNo">670</span>      return hostname;<a name="line.670"></a>
-<span class="sourceLineNo">671</span>    }<a name="line.671"></a>
-<span class="sourceLineNo">672</span>  }<a name="line.672"></a>
-<span class="sourceLineNo">673</span><a name="line.673"></a>
-<span class="sourceLineNo">674</span>  /**<a name="line.674"></a>
-<span class="sourceLineNo">675</span>   * If running on Windows, do windows-specific setup.<a name="line.675"></a>
-<span class="sourceLineNo">676</span>   */<a name="line.676"></a>
-<span class="sourceLineNo">677</span>  private static void setupWindows(final Configuration conf, ConfigurationManager cm) {<a name="line.677"></a>
-<span class="sourceLineNo">678</span>    if (!SystemUtils.IS_OS_WINDOWS) {<a name="line.678"></a>
-<span class="sourceLineNo">679</span>      Signal.handle(new Signal("HUP"), new SignalHandler() {<a name="line.679"></a>
-<span class="sourceLineNo">680</span>        @Override<a name="line.680"></a>
-<span class="sourceLineNo">681</span>        public void handle(Signal signal) {<a name="line.681"></a>
-<span class="sourceLineNo">682</span>          conf.reloadConfiguration();<a name="line.682"></a>
-<span class="sourceLineNo">683</span>          cm.notifyAllObservers(conf);<a name="line.683"></a>
-<span class="sourceLineNo">684</span>        }<a name="line.684"></a>
-<span class="sourceLineNo">685</span>      });<a name="line.685"></a>
-<span class="sourceLineNo">686</span>    }<a name="line.686"></a>
-<span class="sourceLineNo">687</span>  }<a name="line.687"></a>
-<span class="sourceLineNo">688</span><a name="line.688"></a>
-<span class="sourceLineNo">689</span>  private static NettyEventLoopGroupConfig setupNetty(Configuration conf) {<a name="line.689"></a>
-<span class="sourceLineNo">690</span>    // Initialize netty event loop group at start as we may use it for rpc server, rpc client &amp; WAL.<a name="line.690"></a>
-<span class="sourceLineNo">691</span>    NettyEventLoopGroupConfig nelgc =<a name="line.691"></a>
-<span class="sourceLineNo">692</span>      new NettyEventLoopGroupConfig(conf, "RS-EventLoopGroup");<a name="line.692"></a>
-<span class="sourceLineNo">693</span>    NettyRpcClientConfigHelper.setEventLoopConfig(conf, nelgc.group(), nelgc.clientChannelClass());<a name="line.693"></a>
-<span class="sourceLineNo">694</span>    NettyAsyncFSWALConfigHelper.setEventLoopConfig(conf, nelgc.group(), nelgc.clientChannelClass());<a name="line.694"></a>
-<span class="sourceLineNo">695</span>    return nelgc;<a name="line.695"></a>
-<span class="sourceLineNo">696</span>  }<a name="line.696"></a>
-<span class="sourceLineNo">697</span><a name="line.697"></a>
-<span class="sourceLineNo">698</span>  private void initializeFileSystem() throws IOException {<a name="line.698"></a>
-<span class="sourceLineNo">699</span>    // Get fs instance used by this RS.  Do we use checksum verification in the hbase? If hbase<a name="line.699"></a>
-<span class="sourceLineNo">700</span>    // checksum verification enabled, then automatically switch off hdfs checksum verification.<a name="line.700"></a>
-<span class="sourceLineNo">701</span>    boolean useHBaseChecksum = conf.getBoolean(HConstants.HBASE_CHECKSUM_VERIFICATION, true);<a name="line.701"></a>
-<span class="sourceLineNo">702</span>    FSUtils.setFsDefault(this.conf, FSUtils.getWALRootDir(this.conf));<a name="line.702"></a>
-<span class="sourceLineNo">703</span>    this.walFs = new HFileSystem(this.conf, useHBaseChecksum);<a name="line.703"></a>
-<span class="sourceLineNo">704</span>    this.walRootDir = FSUtils.getWALRootDir(this.conf);<a name="line.704"></a>
-<span class="sourceLineNo">705</span>    // Set 'fs.defaultFS' to match the filesystem on hbase.rootdir else<a name="line.705"></a>
-<span class="sourceLineNo">706</span>    // underlying hadoop hdfs accessors will be going against wrong filesystem<a name="line.706"></a>
-<span class="sourceLineNo">707</span>    // (unless all is set to defaults).<a name="line.707"></a>
-<span class="sourceLineNo">708</span>    FSUtils.setFsDefault(this.conf, FSUtils.getRootDir(this.conf));<a name="line.708"></a>
-<span class="sourceLineNo">709</span>    this.fs = new HFileSystem(this.conf, useHBaseChecksum);<a name="line.709"></a>
-<span class="sourceLineNo">710</span>    this.rootDir = FSUtils.getRootDir(this.conf);<a name="line.710"></a>
-<span class="sourceLineNo">711</span>    this.tableDescriptors = getFsTableDescriptors();<a name="line.711"></a>
-<span class="sourceLineNo">712</span>  }<a name="line.712"></a>
-<span class="sourceLineNo">713</span><a name="line.713"></a>
-<span class="sourceLineNo">714</span>  protected TableDescriptors getFsTableDescriptors() throws IOException {<a name="line.714"></a>
-<span class="sourceLineNo">715</span>    return new FSTableDescriptors(this.conf,<a name="line.715"></a>
-<span class="sourceLineNo">716</span>      this.fs, this.rootDir, !canUpdateTableDescriptor(), false, getMetaTableObserver());<a name="line.716"></a>
+<span class="sourceLineNo">602</span>      // no need to instantiate global block cache when master not carry table<a name="line.602"></a>
+<span class="sourceLineNo">603</span>      if (!isMasterNotCarryTable) {<a name="line.603"></a>
+<span class="sourceLineNo">604</span>        CacheConfig.instantiateBlockCache(conf);<a name="line.604"></a>
+<span class="sourceLineNo">605</span>      }<a name="line.605"></a>
+<span class="sourceLineNo">606</span>      cacheConfig = new CacheConfig(conf);<a name="line.606"></a>
+<span class="sourceLineNo">607</span>      mobCacheConfig = new MobCacheConfig(conf);<a name="line.607"></a>
+<span class="sourceLineNo">608</span><a name="line.608"></a>
+<span class="sourceLineNo">609</span>      uncaughtExceptionHandler = new UncaughtExceptionHandler() {<a name="line.609"></a>
+<span class="sourceLineNo">610</span>        @Override<a name="line.610"></a>
+<span class="sourceLineNo">611</span>        public void uncaughtException(Thread t, Throwable e) {<a name="line.611"></a>
+<span class="sourceLineNo">612</span>          abort("Uncaught exception in executorService thread " + t.getName(), e);<a name="line.612"></a>
+<span class="sourceLineNo">613</span>        }<a name="line.613"></a>
+<span class="sourceLineNo">614</span>      };<a name="line.614"></a>
+<span class="sourceLineNo">615</span><a name="line.615"></a>
+<span class="sourceLineNo">616</span>      initializeFileSystem();<a name="line.616"></a>
+<span class="sourceLineNo">617</span>      spanReceiverHost = SpanReceiverHost.getInstance(getConfiguration());<a name="line.617"></a>
+<span class="sourceLineNo">618</span><a name="line.618"></a>
+<span class="sourceLineNo">619</span>      this.configurationManager = new ConfigurationManager();<a name="line.619"></a>
+<span class="sourceLineNo">620</span>      setupWindows(getConfiguration(), getConfigurationManager());<a name="line.620"></a>
+<span class="sourceLineNo">621</span><a name="line.621"></a>
+<span class="sourceLineNo">622</span>      // Some unit tests don't need a cluster, so no zookeeper at all<a name="line.622"></a>
+<span class="sourceLineNo">623</span>      if (!conf.getBoolean("hbase.testing.nocluster", false)) {<a name="line.623"></a>
+<span class="sourceLineNo">624</span>        // Open connection to zookeeper and set primary watcher<a name="line.624"></a>
+<span class="sourceLineNo">625</span>        zooKeeper = new ZKWatcher(conf, getProcessName() + ":" +<a name="line.625"></a>
+<span class="sourceLineNo">626</span>          rpcServices.isa.getPort(), this, canCreateBaseZNode());<a name="line.626"></a>
+<span class="sourceLineNo">627</span>        // If no master in cluster, skip trying to track one or look for a cluster status.<a name="line.627"></a>
+<span class="sourceLineNo">628</span>        if (!this.masterless) {<a name="line.628"></a>
+<span class="sourceLineNo">629</span>          this.csm = new ZkCoordinatedStateManager(this);<a name="line.629"></a>
+<span class="sourceLineNo">630</span><a name="line.630"></a>
+<span class="sourceLineNo">631</span>          masterAddressTracker = new MasterAddressTracker(getZooKeeper(), this);<a name="line.631"></a>
+<span class="sourceLineNo">632</span>          masterAddressTracker.start();<a name="line.632"></a>
+<span class="sourceLineNo">633</span><a name="line.633"></a>
+<span class="sourceLineNo">634</span>          clusterStatusTracker = new ClusterStatusTracker(zooKeeper, this);<a name="line.634"></a>
+<span class="sourceLineNo">635</span>          clusterStatusTracker.start();<a name="line.635"></a>
+<span class="sourceLineNo">636</span>        } else {<a name="line.636"></a>
+<span class="sourceLineNo">637</span>          masterAddressTracker = null;<a name="line.637"></a>
+<span class="sourceLineNo">638</span>          clusterStatusTracker = null;<a name="line.638"></a>
+<span class="sourceLineNo">639</span>        }<a name="line.639"></a>
+<span class="sourceLineNo">640</span>      } else {<a name="line.640"></a>
+<span class="sourceLineNo">641</span>        zooKeeper = null;<a name="line.641"></a>
+<span class="sourceLineNo">642</span>        masterAddressTracker = null;<a name="line.642"></a>
+<span class="sourceLineNo">643</span>        clusterStatusTracker = null;<a name="line.643"></a>
+<span class="sourceLineNo">644</span>      }<a name="line.644"></a>
+<span class="sourceLineNo">645</span>      this.rpcServices.start(zooKeeper);<a name="line.645"></a>
+<span class="sourceLineNo">646</span>      // This violates 'no starting stuff in Constructor' but Master depends on the below chore<a name="line.646"></a>
+<span class="sourceLineNo">647</span>      // and executor being created and takes a different startup route. Lots of overlap between HRS<a name="line.647"></a>
+<span class="sourceLineNo">648</span>      // and M (An M IS A HRS now). Need to refactor so less duplication between M and its super<a name="line.648"></a>
+<span class="sourceLineNo">649</span>      // Master expects Constructor to put up web servers. Ugh.<a name="line.649"></a>
+<span class="sourceLineNo">650</span>      // class HRS. TODO.<a name="line.650"></a>
+<span class="sourceLineNo">651</span>      this.choreService = new ChoreService(getName(), true);<a name="line.651"></a>
+<span class="sourceLineNo">652</span>      this.executorService = new ExecutorService(getName());<a name="line.652"></a>
+<span class="sourceLineNo">653</span>      putUpWebUI();<a name="line.653"></a>
+<span class="sourceLineNo">654</span>    } catch (Throwable t) {<a name="line.654"></a>
+<span class="sourceLineNo">655</span>      // Make sure we log the exception. HRegionServer is often started via reflection and the<a name="line.655"></a>
+<span class="sourceLineNo">656</span>      // cause of failed startup is lost.<a name="line.656"></a>
+<span class="sourceLineNo">657</span>      LOG.error("Failed construction RegionServer", t);<a name="line.657"></a>
+<span class="sourceLineNo">658</span>      throw t;<a name="line.658"></a>
+<span class="sourceLineNo">659</span>    }<a name="line.659"></a>
+<span class="sourceLineNo">660</span>  }<a name="line.660"></a>
+<span class="sourceLineNo">661</span><a name="line.661"></a>
+<span class="sourceLineNo">662</span>  // HMaster should override this method to load the specific config for master<a name="line.662"></a>
+<span class="sourceLineNo">663</span>  protected String getUseThisHostnameInstead(Configuration conf) throws IOException {<a name="line.663"></a>
+<span class="sourceLineNo">664</span>    String hostname = conf.get(RS_HOSTNAME_KEY);<a name="line.664"></a>
+<span class="sourceLineNo">665</span>    if (conf.getBoolean(RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY, false)) {<a name="line.665"></a>
+<span class="sourceLineNo">666</span>      if (!StringUtils.isBlank(hostname)) {<a name="line.666"></a>
+<span class="sourceLineNo">667</span>        String msg = RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY + " and " + RS_HOSTNAME_KEY +<a name="line.667"></a>
+<span class="sourceLineNo">668</span>          " are mutually exclusive. Do not set " + RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY +<a name="line.668"></a>
+<span class="sourceLineNo">669</span>          " to true while " + RS_HOSTNAME_KEY + " is used";<a name="line.669"></a>
+<span class="sourceLineNo">670</span>        throw new IOException(msg);<a name="line.670"></a>
+<span class="sourceLineNo">671</span>      } else {<a name="line.671"></a>
+<span class="sourceLineNo">672</span>        return rpcServices.isa.getHostName();<a name="line.672"></a>
+<span class="sourceLineNo">673</span>      }<a name="line.673"></a>
+<span class="sourceLineNo">674</span>    } else {<a name="line.674"></a>
+<span class="sourceLineNo">675</span>      return hostname;<a name="line.675"></a>
+<span class="sourceLineNo">676</span>    }<a name="line.676"></a>
+<span class="sourceLineNo">677</span>  }<a name="line.677"></a>
+<span class="sourceLineNo">678</span><a name="line.678"></a>
+<span class="sourceLineNo">679</span>  /**<a name="line.679"></a>
+<span class="sourceLineNo">680</span>   * If running on Windows, do windows-specific setup.<a name="line.680"></a>
+<span class="sourceLineNo">681</span>   */<a name="line.681"></a>
+<span class="sourceLineNo">682</span>  private static void setupWindows(final Configuration conf, ConfigurationManager cm) {<a name="line.682"></a>
+<span class="sourceLineNo">683</span>    if (!SystemUtils.IS_OS_WINDOWS) {<a name="line.683"></a>
+<span class="sourceLineNo">684</span>      Signal.handle(new Signal("HUP"), new SignalHandler() {<a name="line.684"></a>
+<span class="sourceLineNo">685</span>        @Override<a name="line.685"></a>
+<span class="sourceLineNo">686</span>        public void handle(Signal signal) {<a name="line.686"></a>
+<span class="sourceLineNo">687</span>          conf.reloadConfiguration();<a name="line.687"></a>
+<span class="sourceLineNo">688</span>          cm.notifyAllObservers(conf);<a name="line.688"></a>
+<span class="sourceLineNo">689</span>        }<a name="line.689"></a>
+<span class="sourceLineNo">690</span>      });<a name="line.690"></a>
+<span class="sourceLineNo">691</span>    }<a name="line.691"></a>
+<span class="sourceLineNo">692</span>  }<a name="line.692"></a>
+<span class="sourceLineNo">693</span><a name="line.693"></a>
+<span class="sourceLineNo">694</span>  private static NettyEventLoopGroupConfig setupNetty(Configuration conf) {<a name="line.694"></a>
+<span class="sourceLineNo">695</span>    // Initialize netty event loop group at start as we may use it for rpc server, rpc client &amp; WAL.<a name="line.695"></a>
+<span class="sourceLineNo">696</span>    NettyEventLoopGroupConfig nelgc =<a name="line.696"></a>
+<span class="sourceLineNo">697</span>      new NettyEventLoopGroupConfig(conf, "RS-EventLoopGroup");<a name="line.697"></a>
+<span class="sourceLineNo">698</span>    NettyRpcClientConfigHelper.setEventLoopConfig(conf, nelgc.group(), nelgc.clientChannelClass());<a name="line.698"></a>
+<span class="sourceLineNo">699</span>    NettyAsyncFSWALConfigHelper.setEventLoopConfig(conf, nelgc.group(), nelgc.clientChannelClass());<a name="line.699"></a>
+<span class="sourceLineNo">700</span>    return nelgc;<a name="line.700"></a>
+<span class="sourceLineNo">701</span>  }<a name="line.701"></a>
+<span class="sourceLineNo">702</span><a name="line.702"></a>
+<span class="sourceLineNo">703</span>  private void initializeFileSystem() throws IOException {<a name="line.703"></a>
+<span class="sourceLineNo">704</span>    // Get fs instance used by this RS.  Do we use checksum verification in the hbase? If hbase<a name="line.704"></a>
+<span class="sourceLineNo">705</span>    // checksum verification enabled, then automatically switch off hdfs checksum verification.<a name="line.705"></a>
+<span class="sourceLineNo">706</span>    boolean useHBaseChecksum = conf.getBoolean(HConstants.HBASE_CHECKSUM_VERIFICATION, true);<a name="line.706"></a>
+<span class="sourceLineNo">707</span>    FSUtils.setFsDefault(this.conf, FSUtils.getWALRootDir(this.conf));<a name="line.707"></a>
+<span class="sourceLineNo">708</span>    this.walFs = new HFileSystem(this.conf, useHBaseChecksum);<a name="line.708"></a>
+<span class="sourceLineNo">709</span>    this.walRootDir = FSUtils.getWALRootDir(this.conf);<a name="line.709"></a>
+<span class="sourceLineNo">710</span>    // Set 'fs.defaultFS' to match the filesystem on hbase.rootdir else<a name="line.710"></a>
+<span class="sourceLineNo">711</span>    // underlying hadoop hdfs accessors will be going against wrong filesystem<a name="line.711"></a>
+<span class="sourceLineNo">712</span>    // (unless all is set to defaults).<a name="line.712"></a>
+<span class="sourceLineNo">713</span>    FSUtils.setFsDefault(this.conf, FSUtils.getRootDir(this.conf));<a name="line.713"></a>
+<span class="sourceLineNo">714</span>    this.fs = new HFileSystem(this.conf, useHBaseChecksum);<a name="line.714"></a>
+<span class="sourceLineNo">715</span>    this.rootDir = FSUtils.getRootDir(this.conf);<a name="line.715"></a>
+<span class="sourceLineNo">716</span>    this.tableDescriptors = getFsTableDescriptors();<a name="line.716"></a>
 <span class="sourceLineNo">717</span>  }<a name="line.717"></a>
 <span class="sourceLineNo">718</span><a name="line.718"></a>
-<span class="sourceLineNo">719</span>  protected Function&lt;TableDescriptorBuilder, TableDescriptorBuilder&gt; getMetaTableObserver() {<a name="line.719"></a>
-<span class="sourceLineNo">720</span>    return null;<a name="line.720"></a>
-<span class="sourceLineNo">721</span>  }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>  protected void login(UserProvider user, String host) throws IOException {<a name="line.723"></a>
-<span class="sourceLineNo">724</span>    user.login("hbase.regionserver.keytab.file",<a name="line.724"></a>
-<span class="sourceLineNo">725</span>      "hbase.regionserver.kerberos.principal", host);<a name="line.725"></a>
+<span class="sourceLineNo">719</span>  protected TableDescriptors getFsTableDescriptors() throws IOException {<a name="line.719"></a>
+<span class="sourceLineNo">720</span>    return new FSTableDescriptors(this.conf,<a name="line.720"></a>
+<span class="sourceLineNo">721</span>      this.fs, this.rootDir, !canUpdateTableDescriptor(), false, getMetaTableObserver());<a name="line.721"></a>
+<span class="sourceLineNo">722</span>  }<a name="line.722"></a>
+<span class="sourceLineNo">723</span><a name="line.723"></a>
+<span class="sourceLineNo">724</span>  protected Function&lt;TableDescriptorBuilder, TableDescriptorBuilder&gt; getMetaTableObserver() {<a name="line.724"></a>
+<span class="sourceLineNo">725</span>    return null;<a name="line.725"></a>
 <span class="sourceLineNo">726</span>  }<a name="line.726"></a>
 <span class="sourceLineNo">727</span><a name="line.727"></a>
-<span class="sourceLineNo">728</span><a name="line.728"></a>
-<span class="sourceLineNo">729</span>  /**<a name="line.729"></a>
-<span class="sourceLineNo">730</span>   * Wait for an active Master.<a name="line.730"></a>
-<span class="sourceLineNo">731</span>   * See override in Master superclass for how it is used.<a name="line.731"></a>
-<span class="sourceLineNo">732</span>   */<a name="line.732"></a>
-<span class="sourceLineNo">733</span>  protected void waitForMasterActive() {}<a name="line.733"></a>
-<span class="sourceLineNo">734</span><a name="line.734"></a>
-<span class="sourceLineNo">735</span>  protected String getProcessName() {<a name="line.735"></a>
-<span class="sourceLineNo">736</span>    return REGIONSERVER;<a name="line.736"></a>
-<span class="sourceLineNo">737</span>  }<a name="line.737"></a>
-<span class="sourceLineNo">738</span><a name="line.738"></a>
-<span class="sourceLineNo">739</span>  protected boolean canCreateBaseZNode() {<a name="line.739"></a>
-<span class="sourceLineNo">740</span>    return this.masterless;<a name="line.740"></a>
-<span class="sourceLineNo">741</span>  }<a name="line.741"></a>
-<span class="sourceLineNo">742</span><a name="line.742"></a>
-<span class="sourceLineNo">743</span>  protected boolean canUpdateTableDescriptor() {<a name="line.743"></a>
-<span class="sourceLineNo">744</span>    return false;<a name="line.744"></a>
-<span class="sourceLineNo">745</span>  }<a name="line.745"></a>
-<span class="sourceLineNo">746</span><a name="line.746"></a>
-<span class="sourceLineNo">747</span>  protected RSRpcServices createRpcServices() throws IOException {<a name="line.747"></a>
-<span class="sourceLineNo">748</span>    return new RSRpcServices(this);<a name="line.748"></a>
-<span class="sourceLineNo">749</span>  }<a name="line.749"></a>
-<span class="sourceLineNo">750</span><a name="line.750"></a>
-<span class="sourceLineNo">751</span>  protected void configureInfoServer() {<a name="line.751"></a>
-<span class="sourceLineNo">752</span>    infoServer.addServlet("rs-status", "/rs-status", RSStatusServlet.class);<a name="line.752"></a>
-<span class="sourceLineNo">753</span>    infoServer.setAttribute(REGIONSERVER, this);<a name="line.753"></a>
+<span class="sourceLineNo">728</span>  protected void login(UserProvider user, String host) throws IOException {<a name="line.728"></a>
+<span class="sourceLineNo">729</span>    user.login("hbase.regionserver.keytab.file",<a name="line.729"></a>
+<span class="sourceLineNo">730</span>      "hbase.regionserver.kerberos.principal", host);<a name="line.730"></a>
+<span class="sourceLineNo">731</span>  }<a name="line.731"></a>
+<span class="sourceLineNo">732</span><a name="line.732"></a>
+<span class="sourceLineNo">733</span><a name="line.733"></a>
+<span class="sourceLineNo">734</span>  /**<a name="line.734"></a>
+<span class="sourceLineNo">735</span>   * Wait for an active Master.<a name="line.735"></a>
+<span class="sourceLineNo">736</span>   * See override in Master superclass for how it is used.<a name="line.736"></a>
+<span class="sourceLineNo">737</span>   */<a name="line.737"></a>
+<span class="sourceLineNo">738</span>  protected void waitForMasterActive() {}<a name="line.738"></a>
+<span class="sourceLineNo">739</span><a name="line.739"></a>
+<span class="sourceLineNo">740</span>  protected String getProcessName() {<a name="line.740"></a>
+<span class="sourceLineNo">741</span>    return REGIONSERVER;<a name="line.741"></a>
+<span class="sourceLineNo">742</span>  }<a name="line.742"></a>
+<span class="sourceLineNo">743</span><a name="line.743"></a>
+<span class="sourceLineNo">744</span>  protected boolean canCreateBaseZNode() {<a name="line.744"></a>
+<span class="sourceLineNo">745</span>    return this.masterless;<a name="line.745"></a>
+<span class="sourceLineNo">746</span>  }<a name="line.746"></a>
+<span class="sourceLineNo">747</span><a name="line.747"></a>
+<span class="sourceLineNo">748</span>  protected boolean canUpdateTableDescriptor() {<a name="line.748"></a>
+<span class="sourceLineNo">749</span>    return false;<a name="line.749"></a>
+<span class="sourceLineNo">750</span>  }<a name="line.750"></a>
+<span class="sourceLineNo">751</span><a name="line.751"></a>
+<span class="sourceLineNo">752</span>  protected RSRpcServices createRpcServices() throws IOException {<a name="line.752"></a>
+<span class="sourceLineNo">753</span>    return new RSRpcServices(this);<a name="line.753"></a>
 <span class="sourceLineNo">754</span>  }<a name="line.754"></a>
 <span class="sourceLineNo">755</span><a name="line.755"></a>
-<span class="sourceLineNo">756</span>  protected Class&lt;? extends HttpServlet&gt; getDumpServlet() {<a name="line.756"></a>
-<span class="sourceLineNo">757</span>    return RSDumpServlet.class;<a name="line.757"></a>
-<span class="sourceLineNo">758</span>  }<a name="line.758"></a>
-<span class="sourceLineNo">759</span><a name="line.759"></a>
-<span class="sourceLineNo">760</span>  @Override<a name="line.760"></a>
-<span class="sourceLineNo">761</span>  public boolean registerService(com.google.protobuf.Service instance) {<a name="line.761"></a>
-<span class="sourceLineNo">762</span>    /*<a name="line.762"></a>
-<span class="sourceLineNo">763</span>     * No stacking of instances is allowed for a single executorService name<a name="line.763"></a>
-<span class="sourceLineNo">764</span>     */<a name="line.764"></a>
-<span class="sourceLineNo">765</span>    com.google.protobuf.Descriptors.ServiceDescriptor serviceDesc =<a name="line.765"></a>
-<span class="sourceLineNo">766</span>        instance.getDescriptorForType();<a name="line.766"></a>
-<span class="sourceLineNo">767</span>    String serviceName = CoprocessorRpcUtils.getServiceName(serviceDesc);<a name="line.767"></a>
-<span class="sourceLineNo">768</span>    if (coprocessorServiceHandlers.containsKey(serviceName)) {<a name="line.768"></a>
-<span class="sourceLineNo">769</span>      LOG.error("Coprocessor executorService " + serviceName<a name="line.769"></a>
-<span class="sourceLineNo">770</span>          + " already registered, rejecting request from " + instance);<a name="line.770"></a>
-<span class="sourceLineNo">771</span>      return false;<a name="line.771"></a>
-<span class="sourceLineNo">772</span>    }<a name="line.772"></a>
-<span class="sourceLineNo">773</span><a name="line.773"></a>
-<span class="sourceLineNo">774</span>    coprocessorServiceHandlers.put(serviceName, instance);<a name="line.774"></a>
-<span class="sourceLineNo">775</span>    if (LOG.isDebugEnabled()) {<a name="line.775"></a>
-<span class="sourceLineNo">776</span>      LOG.debug("Registered regionserver coprocessor executorService: executorService=" + serviceName);<a name="line.776"></a>
+<span class="sourceLineNo">756</span>  protected void configureInfoServer() {<a name="line.756"></a>
+<span class="sourceLineNo">757</span>    infoServer.addServlet("rs-status", "/rs-status", RSStatusServlet.class);<a name="line.757"></a>
+<span class="sourceLineNo">758</span>    infoServer.setAttribute(REGIONSERVER, this);<a name="line.758"></a>
+<span class="sourceLineNo">759</span>  }<a name="line.759"></a>
+<span class="sourceLineNo">760</span><a name="line.760"></a>
+<span class="sourceLineNo">761</span>  protected Class&lt;? extends HttpServlet&gt; getDumpServlet() {<a name="line.761"></a>
+<span class="sourceLineNo">762</span>    return RSDumpServlet.class;<a name="line.762"></a>
+<span class="sourceLineNo">763</span>  }<a name="line.763"></a>
+<span class="sourceLineNo">764</span><a name="line.764"></a>
+<span class="sourceLineNo">765</span>  @Override<a name="line.765"></a>
+<span class="sourceLineNo">766</span>  public boolean registerService(com.google.protobuf.Service instance) {<a name="line.766"></a>
+<span class="sourceLineNo">767</span>    /*<a name="line.767"></a>
+<span class="sourceLineNo">768</span>     * No stacking of instances is allowed for a single executorService name<a name="line.768"></a>
+<span class="sourceLineNo">769</span>     */<a name="line.769"></a>
+<span class="sourceLineNo">770</span>    com.google.protobuf.Descriptors.ServiceDescriptor serviceDesc =<a name="line.770"></a>
+<span class="sourceLineNo">771</span>        instance.getDescriptorForType();<a name="line.771"></a>
+<span class="sourceLineNo">772</span>    String serviceName = CoprocessorRpcUtils.getServiceName(serviceDesc);<a name="line.772"></a>
+<span class="sourceLineNo">773</span>    if (coprocessorServiceHandlers.containsKey(serviceName)) {<a name="line.773"></a>
+<span class="sourceLineNo">774</span>      LOG.error("Coprocessor executorService " + serviceName<a name="line.774"></a>
+<span class="sourceLineNo">775</span>          + " already registered, rejecting request from " + instance);<a name="line.775"></a>
+<span class="sourceLineNo">776</span>      return false;<a name="line.776"></a>
 <span class="sourceLineNo">777</span>    }<a name="line.777"></a>
-<span class="sourceLineNo">778</span>    return true;<a name="line.778"></a>
-<span class="sourceLineNo">779</span>  }<a name="line.779"></a>
-<span class="sourceLineNo">780</span><a name="line.780"></a>
-<span class="sourceLineNo">781</span>  /**<a name="line.781"></a>
-<span class="sourceLineNo">782</span>   * Create a 'smarter' Connection, one that is capable of by-passing RPC if the request is to the<a name="line.782"></a>
-<span class="sourceLineNo">783</span>   * local server; i.e. a short-circuit Connection. Safe to use going to local or remote server.<a name="line.783"></a>
-<span class="sourceLineNo">784</span>   */<a name="line.784"></a>
-<span class="sourceLineNo">785</span>  private ClusterConnection createClusterConnection() throws IOException {<a name="line.785"></a>
-<span class="sourceLineNo">786</span>    Configuration conf = this.conf;<a name="line.786"></a>
-<span class="sourceLineNo">787</span>    if (conf.get(HConstants.CLIENT_ZOOKEEPER_QUORUM) != null) {<a name="line.787"></a>
-<span class="sourceLineNo">788</span>      // Use server ZK cluster for server-issued connections, so we clone<a name="line.788"></a>
-<span class="sourceLineNo">789</span>      // the conf and unset the client ZK related properties<a name="line.789"></a>
-<span class="sourceLineNo">790</span>      conf = new Configuration(this.conf);<a name="line.790"></a>
-<span class="sourceLineNo">791</span>      conf.unset(HConstants.CLIENT_ZOOKEEPER_QUORUM);<a name="line.791"></a>
-<span class="sourceLineNo">792</span>    }<a name="line.792"></a>
-<span class="sourceLineNo">793</span>    // Create a cluster connection that when appropriate, can short-circuit and go directly to the<a name="line.793"></a>
-<span class="sourceLineNo">794</span>    // local server if the request is to the local server bypassing RPC. Can be used for both local<a name="line.794"></a>
-<span class="sourceLineNo">795</span>    // and remote invocations.<a name="line.795"></a>
-<span class="sourceLineNo">796</span>    ClusterConnection conn = ConnectionUtils.createShortCircuitConnection(conf, null,<a name="line.796"></a>
-<span class="sourceLineNo">797</span>      userProvider.getCurrent(), serverName, rpcServices, rpcServices);<a name="line.797"></a>
-<span class="sourceLineNo">798</span>    // This is used to initialize the batch thread pool inside the connection implementation.<a name="line.798"></a>
-<span class="sourceLineNo">799</span>    // When deploy a fresh cluster, we may first use the cluster connection in InitMetaProcedure,<a name="line.799"></a>
-<span class="sourceLineNo">800</span>    // which will be executed inside the PEWorker, and then the batch thread pool will inherit the<a name="line.800"></a>
-<span class="sourceLineNo">801</span>    // thread group of PEWorker, which will be destroy when shutting down the ProcedureExecutor. It<a name="line.801"></a>
-<span class="sourceLineNo">802</span>    // will cause lots of procedure related UTs to fail, so here let's initialize it first, no harm.<a name="line.802"></a>
-<span class="sourceLineNo">803</span>    conn.getTable(TableName.META_TABLE_NAME).close();<a name="line.803"></a>
-<span class="sourceLineNo">804</span>    return conn;<a name="line.804"></a>
-<span class="sourceLineNo">805</span>  }<a name="line.805"></a>
-<span class="sourceLineNo">806</span><a name="line.806"></a>
-<span class="sourceLineNo">807</span>  /**<a name="line.807"></a>
-<span class="sourceLineNo">808</span>   * Run test on configured codecs to make sure supporting libs are in place.<a name="line.808"></a>
-<span class="sourceLineNo">809</span>   * @param c<a name="line.809"></a>
-<span class="sourceLineNo">810</span>   * @throws IOException<a name="line.810"></a>
-<span class="sourceLineNo">811</span>   */<a name="line.811"></a>
-<span class="sourceLineNo">812</span>  private static void checkCodecs(final Configuration c) throws IOException {<a name="line.812"></a>
-<span class="sourceLineNo">813</span>    // check to see if the codec list is available:<a name="line.813"></a>
-<span class="sourceLineNo">814</span>    String [] codecs = c.getStrings("hbase.regionserver.codecs", (String[])null);<a name="line.814"></a>
-<span class="sourceLineNo">815</span>    if (codecs == null) return;<a name="line.815"></a>
-<span class="sourceLineNo">816</span>    for (String codec : codecs) {<a name="line.816"></a>
-<span class="sourceLineNo">817</span>      if (!CompressionTest.testCompression(codec)) {<a name="line.817"></a>
-<span class="sourceLineNo">818</span>        throw new IOException("Compression codec " + codec +<a name="line.818"></a>
-<span class="sourceLineNo">819</span>          " not supported, aborting RS construction");<a name="line.819"></a>
-<span class="sourceLineNo">820</span>      }<a name="line.820"></a>
-<span class="sourceLineNo">821</span>    }<a name="line.821"></a>
-<span class="sourceLineNo">822</span>  }<a name="line.822"></a>
-<span class="sourceLineNo">823</span><a name="line.823"></a>
-<span class="sourceLineNo">824</span>  public String getClusterId() {<a name="line.824"></a>
-<span class="sourceLineNo">825</span>    return this.clusterId;<a name="line.825"></a>
-<span class="sourceLineNo">826</span>  }<a name="line.826"></a>
-<span class="sourceLineNo">827</span><a name="line.827"></a>
-<span class="sourceLineNo">828</span>  /**<a name="line.828"></a>
-<span class="sourceLineNo">829</span>   * Setup our cluster connection if not already initialized.<a name="line.829"></a>
-<span class="sourceLineNo">830</span>   */<a name="line.830"></a>
-<span class="sourceLineNo">831</span>  protected synchronized void setupClusterConnection() throws IOException {<a name="line.831"></a>
-<span class="sourceLineNo">832</span>    if (clusterConnection == null) {<a name="line.832"></a>
-<span class="sourceLineNo">833</span>      clusterConnection = createClusterConnection();<a name="line.833"></a>
-<span class="sourceLineNo">834</span>      metaTableLocator = new MetaTableLocator();<a name="line.834"></a>
-<span class="sourceLineNo">835</span>    }<a name="line.835"></a>
-<span class="sourceLineNo">836</span>  }<a name="line.836"></a>
-<span class="sourceLineNo">837</span><a name="line.837"></a>
-<span class="sourceLineNo">838</span>  /**<a name="line.838"></a>
-<span class="sourceLineNo">839</span>   * All initialization needed before we go register with Master.&lt;br&gt;<a name="line.839"></a>
-<span class="sourceLineNo">840</span>   * Do bare minimum. Do bulk of initializations AFTER we've connected to the Master.&lt;br&gt;<a name="line.840"></a>
-<span class="sourceLineNo">841</span>   * In here we just put up the RpcServer, setup Connection, and ZooKeeper.<a name="line.841"></a>
-<span class="sourceLineNo">842</span>   */<a name="line.842"></a>
-<span class="sourceLineNo">843</span>  private void preRegistrationInitialization() {<a name="line.843"></a>
-<span class="sourceLineNo">844</span>    try {<a name="line.844"></a>
-<span class="sourceLineNo">845</span>      initializeZooKeeper();<a name="line.845"></a>
-<span class="sourceLineNo">846</span>      setupClusterConnection();<a name="line.846"></a>
-<span class="sourceLineNo">847</span>      // Setup RPC client for master communication<a name="line.847"></a>
-<span class="sourceLineNo">848</span>      this.rpcClient = RpcClientFactory.createClient(conf, clusterId, new InetSocketAddress(<a name="line.848"></a>
-<span class="sourceLineNo">849</span>          this.rpcServices.isa.getAddress(), 0), clusterConnection.getConnectionMetrics());<a name="line.849"></a>
-<span class="sourceLineNo">850</span>    } catch (Throwable t) {<a name="line.850"></a>
-<span class="sourceLineNo">851</span>      // Call stop if error or process will stick around for ever since server<a name="line.851"></a>
-<span class="sourceLineNo">852</span>      // puts up non-daemon threads.<a name="line.852"></a>
-<span class="sourceLineNo">853</span>      this.rpcServices.stop();<a name="line.853"></a>
-<span class="sourceLineNo">854</span>      abort("Initialization of RS failed.  Hence aborting RS.", t);<a name="line.854"></a>
-<span class="sourceLineNo">855</span>    }<a name="line.855"></a>
-<span class="sourceLineNo">856</span>  }<a name="line.856"></a>
-<span class="sourceLineNo">857</span><a name="line.857"></a>
-<span class="sourceLineNo">858</span>  /**<a name="line.858"></a>
-<span class="sourceLineNo">859</span>   * Bring up connection to zk ensemble and then wait until a master for this cluster and then after<a name="line.859"></a>
-<span class="sourceLineNo">860</span>   * that, wait until cluster 'up' flag has been set. This is the order in which master does things.<a name="line.860"></a>
-<span class="sourceLineNo">861</span>   * &lt;p&gt;<a name="line.861"></a>
-<span class="sourceLineNo">862</span>   * Finally open long-living server short-circuit connection.<a name="line.862"></a>
-<span class="sourceLineNo">863</span>   */<a name="line.863"></a>
-<span class="sourceLineNo">864</span>  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="RV_RETURN_VALUE_IGNORED_BAD_PRACTICE",<a name="line.864"></a>
-<span class="sourceLineNo">865</span>    justification="cluster Id znode read would give us correct response")<a name="line.865"></a>
-<span class="sourceLineNo">866</span>  private void initializeZooKeeper() throws IOException, InterruptedException {<a name="line.866"></a>
-<span class="sourceLineNo">867</span>    // Nothing to do in here if no Master in the mix.<a name="line.867"></a>
-<span class="sourceLineNo">868</span>    if (this.masterless) {<a name="line.868"></a>
-<span class="sourceLineNo">869</span>      return;<a name="line.869"></a>
-<span class="sourceLineNo">870</span>    }<a name="line.870"></a>
-<span class="sourceLineNo">871</span><a name="line.871"></a>
-<span class="sourceLineNo">872</span>    // Create the master address tracker, register with zk, and start it.  Then<a name="line.872"></a>
-<span class="sourceLineNo">873</span>    // block until a master is available.  No point in starting up if no master<a name="line.873"></a>
-<span class="sourceLineNo">874</span>    // running.<a name="line.874"></a>
-<span class="sourceLineNo">875</span>    blockAndCheckIfStopped(this.masterAddressTracker);<a name="line.875"></a>
+<span class="sourceLineNo">778</span><a name="line.778"></a>
+<span class="sourceLineNo">779</span>    coprocessorServiceHandlers.put(serviceName, instance);<a name="line.779"></a>
+<span class="sourceLineNo">780</span>    if (LOG.isDebugEnabled()) {<a name="line.780"></a>
+<span class="sourceLineNo">781</span>      LOG.debug("Registered regionserver coprocessor executorService: executorService=" + serviceName);<a name="line.781"></a>
+<span class="sourceLineNo">782</span>    }<a name="line.782"></a>
+<span class="sourceLineNo">783</span>    return true;<a name="line.783"></a>
+<span class="sourceLineNo">784</span>  }<a name="line.784"></a>
+<span class="sourceLineNo">785</span><a name="line.785"></a>
+<span class="sourceLineNo">786</span>  /**<a name="line.786"></a>
+<span class="sourceLineNo">787</span>   * Create a 'smarter' Connection, one that is capable of by-passing RPC if the request is to the<a name="line.787"></a>
+<span class="sourceLineNo">788</span>   * local server; i.e. a short-circuit Connection. Safe to use going to local or remote server.<a name="line.788"></a>
+<span class="sourceLineNo">789</span>   */<a name="line.789"></a>
+<span class="sourceLineNo">790</span>  private ClusterConnection createClusterConnection() throws IOException {<a name="line.790"></a>
+<span class="sourceLineNo">791</span>    Configuration conf = this.conf;<a name="line.791"></a>
+<span class="sourceLineNo">792</span>    if (conf.get(HConstants.CLIENT_ZOOKEEPER_QUORUM) != null) {<a name="line.792"></a>
+<span class="sourceLineNo">793</span>      // Use server ZK cluster for server-issued connections, so we clone<a name="line.793"></a>
+<span class="sourceLineNo">794</span>      // the conf and unset the client ZK related properties<a name="line.794"></a>
+<span class="sourceLineNo">795</span>      conf = new Configuration(this.conf);<a name="line.795"></a>
+<span class="sourceLineNo">796</span>      conf.unset(HConstants.CLIENT_ZOOKEEPER_QUORUM);<a name="line.796"></a>
+<span class="sourceLineNo">797</span>    }<a name="line.797"></a>
+<span class="sourceLineNo">798</span>    // Create a cluster connection that when appropriate, can short-circuit and go directly to the<a name="line.798"></a>
+<span class="sourceLineNo">799</span>    // local server if the request is to the local server bypassing RPC. Can be used for both local<a name="line.799"></a>
+<span class="sourceLineNo">800</span>    // and remote invocations.<a name="line.800"></a>
+<span class="sourceLineNo">801</span>    ClusterConnection conn = ConnectionUtils.createShortCircuitConnection(conf, null,<a name="line.801"></a>
+<span class="sourceLineNo">802</span>      userProvider.getCurrent(), serverName, rpcServices, rpcServices);<a name="line.802"></a>
+<span class="sourceLineNo">803</span>    // This is used to initialize the batch thread pool inside the connection implementation.<a name="line.803"></a>
+<span class="sourceLineNo">804</span>    // When deploy a fresh cluster, we may first use the cluster connection in InitMetaProcedure,<a name="line.804"></a>
+<span class="sourceLineNo">805</span>    // which will be executed inside the PEWorker, and then the batch thread pool will inherit the<a name="line.805"></a>
+<span class="sourceLineNo">806</span>    // thread group of PEWorker, which will be destroy when shutting down the ProcedureExecutor. It<a name="line.806"></a>
+<span class="sourceLineNo">807</span>    // will cause lots of procedure related UTs to fail, so here let's initialize it first, no harm.<a name="line.807"></a>
+<span class="sourceLineNo">808</span>    conn.getTable(TableName.META_TABLE_NAME).close();<a name="line.808"></a>
+<span class="sourceLineNo">809</span>    return conn;<a name="line.809"></a>
+<span class="sourceLineNo">810</span>  }<a name="line.810"></a>
+<span class="sourceLineNo">811</span><a name="line.811"></a>
+<span class="sourceLineNo">812</span>  /**<a name="line.812"></a>
+<span class="sourceLineNo">813</span>   * Run test on configured codecs to make sure supporting libs are in place.<a name="line.813"></a>
+<span class="sourceLineNo">814</span>   * @param c<a name="line.814"></a>
+<span class="sourceLineNo">815</span>   * @throws IOException<a name="line.815"></a>
+<span class="sourceLineNo">816</span>   */<a name="line.816"></a>
+<span class="sourceLineNo">817</span>  private static void checkCodecs(final Configuration c) throws IOException {<a name="line.817"></a>
+<span class="sourceLineNo">818</span>    // check to see if the codec list is available:<a name="line.818"></a>
+<span class="sourceLineNo">819</span>    String [] codecs = c.getStrings("hbase.regionserver.codecs", (String[])null);<a name="line.819"></a>
+<span class="sourceLineNo">820</span>    if (codecs == null) return;<a name="line.820"></a>
+<span class="sourceLineNo">821</span>    for (String codec : codecs) {<a name="line.821"></a>
+<span class="sourceLineNo">822</span>      if (!CompressionTest.testCompression(codec)) {<a name="line.822"></a>
+<span class="sourceLineNo">823</span>        throw new IOException("Compression codec " + codec +<a name="line.823"></a>
+<span class="sourceLineNo">824</span>          " not supported, aborting RS construction");<a name="line.824"></a>
+<span class="sourceLineNo">825</span>      }<a name="line.825"></a>
+<span class="sourceLineNo">826</span>    }<a name="line.826"></a>
+<span class="sourceLineNo">827</span>  }<a name="line.827"></a>
+<span class="sourceLineNo">828</span><a name="line.828"></a>
+<span class="sourceLineNo">829</span>  public String getClusterId() {<a name="line.829"></a>
+<span class="sourceLineNo">830</span>    return this.clusterId;<a name="line.830"></a>
+<span class="sourceLineNo">831</span>  }<a name="line.831"></a>
+<span class="sourceLineNo">832</span><a name="line.832"></a>
+<span class="sourceLineNo">833</span>  /**<a name="line.833"></a>
+<span class="sourceLineNo">834</span>   * Setup our cluster connection if not already initialized.<a name="line.834"></a>
+<span class="sourceLineNo">835</span>   */<a name="line.835"></a>
+<span class="sourceLineNo">836</span>  protected synchronized void setupClusterConnection() throws IOException {<a name="line.836"></a>
+<span class="sourceLineNo">837</span>    if (clusterConnection == null) {<a name="line.837"></a>
+<span class="sourceLineNo">838</span>      clusterConnection = createClusterConnection();<a name="line.838"></a>
+<span class="sourceLineNo">839</span>      metaTableLocator = new MetaTableLocator();<a name="line.839"></a>
+<span class="sourceLineNo">840</span>    }<a name="line.840"></a>
+<span class="sourceLineNo">841</span>  }<a name="line.841"></a>
+<span class="sourceLineNo">842</span><a name="line.842"></a>
+<span class="sourceLineNo">843</span>  /**<a name="line.843"></a>
+<span class="sourceLineNo">844</span>   * All initialization needed before we go register with Master.&lt;br&gt;<a name="line.844"></a>
+<span class="sourceLineNo">845</span>   * Do bare minimum. Do bulk of initializations AFTER we've connected to the Master.&lt;br&gt;<a name="line.845"></a>
+<span class="sourceLineNo">846</span>   * In here we just put up the RpcServer, setup Connection, and ZooKeeper.<a name="line.846"></a>
+<span class="sourceLineNo">847</span>   */<a name="line.847"></a>
+<span class="sourceLineNo">848</span>  private void preRegistrationInitialization() {<a name="line.848"></a>
+<span class="sourceLineNo">849</span>    try {<a name="line.849"></a>
+<span class="sourceLineNo">850</span>      initializeZooKeeper();<a name="line.850"></a>
+<span class="sourceLineNo">851</span>      setupClusterConnection();<a name="line.851"></a>
+<span class="sourceLineNo">852</span>      // Setup RPC client for master communication<a name="line.852"></a>
+<span class="sourceLineNo">853</span>      this.rpcClient = RpcClientFactory.createClient(conf, clusterId, new InetSocketAddress(<a name="line.853"></a>
+<span class="sourceLineNo">854</span>          this.rpcServices.isa.getAddress(), 0), clusterConnection.getConnectionMetrics());<a name="line.854"></a>
+<span class="sourceLineNo">855</span>    } catch (Throwable t) {<a name="line.855"></a>
+<span class="sourceLineNo">856</span>      // Call stop if error or process will stick around for ever since server<a name="line.856"></a>
+<span class="sourceLineNo">857</span>      // puts up non-daemon threads.<a name="line.857"></a>
+<span class="sourceLineNo">858</span>      this.rpcServices.stop();<a name="line.858"></a>
+<span class="sourceLineNo">859</span>      abort("Initialization of RS failed.  Hence aborting RS.", t);<a name="line.859"></a>
+<span class="sourceLineNo">860</span>    }<a name="line.860"></a>
+<span class="sourceLineNo">861</span>  }<a name="line.861"></a>
+<span class="sourceLineNo">862</span><a name="line.862"></a>
+<span class="sourceLineNo">863</span>  /**<a name="line.863"></a>
+<span class="sourceLineNo">864</span>   * Bring up connection to zk ensemble and then wait until a master for this cluster and then after<a name="line.864"></a>
+<span class="sourceLineNo">865</span>   * that, wait until cluster 'up' flag has been set. This is the order in which master does things.<a name="line.865"></a>
+<span class="sourceLineNo">866</span>   * &lt;p&gt;<a name="line.866"></a>
+<span class="sourceLineNo">867</span>   * Finally open long-living server short-circuit connection.<a name="line.867"></a>
+<span class="sourceLineNo">868</span>   */<a name="line.868"></a>
+<span class="sourceLineNo">869</span>  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="RV_RETURN_VALUE_IGNORED_BAD_PRACTICE",<a name="line.869"></a>
+<span class="sourceLineNo">870</span>    justification="cluster Id znode read would give us correct response")<a name="line.870"></a>
+<span class="sourceLineNo">871</span>  private void initializeZooKeeper() throws IOException, InterruptedException {<a name="line.871"></a>
+<span class="sourceLineNo">872</span>    // Nothing to do in here if no Master in the mix.<a name="line.872"></a>
+<span class="sourceLineNo">873</span>    if (this.masterless) {<a name="line.873"></a>
+<span class="sourceLineNo">874</span>      return;<a name="line.874"></a>
+<span class="sourceLineNo">875</span>    }<a name="line.875"></a>
 <span class="sourceLineNo">876</span><a name="line.876"></a>
-<span class="sourceLineNo">877</span>    // Wait on cluster being up.  Master will set this flag up in zookeeper<a name="line.877"></a>
-<span class="sourceLineNo">878</span>    // when ready.<a name="line.878"></a>
-<span class="sourceLineNo">879</span>    blockAndCheckIfStopped(this.clusterStatusTracker);<a name="line.879"></a>
-<span class="sourceLineNo">880</span><a name="line.880"></a>
-<span class="sourceLineNo">881</span>    // If we are HMaster then the cluster id should have already been set.<a name="line.881"></a>
-<span class="sourceLineNo">882</span>    if (clusterId == null) {<a name="line.882"></a>
-<span class="sourceLineNo">883</span>      // Retrieve clusterId<a name="line.883"></a>
-<span class="sourceLineNo">884</span>      // Since cluster status is now up<a name="line.884"></a>
-<span class="sourceLineNo">885</span>      // ID should have already been set by HMaster<a name="line.885"></a>
-<span class="sourceLineNo">886</span>      try {<a name="line.886"></a>
-<span class="sourceLineNo">887</span>        clusterId = ZKClusterId.readClusterIdZNode(this.zooKeeper);<a name="line.887"></a>
-<span class="sourceLineNo">888</span>        if (clusterId == null) {<a name="line.888"></a>
-<span class="sourceLineNo">889</span>          this.abort("Cluster ID has not been set");<a name="line.889"></a>
-<span class="sourceLineNo">890</span>        }<a name="line.890"></a>
-<span class="sourceLineNo">891</span>        LOG.info("ClusterId : " + clusterId);<a name="line.891"></a>
-<span class="sourceLineNo">892</span>      } catch (KeeperException e) {<a name="line.892"></a>
-<span class="sourceLineNo">893</span>        this.abort("Failed to retrieve Cluster ID", e);<a name="line.893"></a>
-<span class="sourceLineNo">894</span>      }<a name="line.894"></a>
-<span class="sourceLineNo">895</span>    }<a name="line.895"></a>
-<span class="sourceLineNo">896</span><a name="line.896"></a>
-<span class="sourceLineNo">897</span>    waitForMasterActive();<a name="line.897"></a>
-<span class="sourceLineNo">898</span>    if (isStopped() || isAborted()) {<a name="line.898"></a>
-<span class="sourceLineNo">899</span>      return; // No need for further initialization<a name="line.899"></a>
+<span class="sourceLineNo">877</span>    // Create the master address tracker, register with zk, and start it.  Then<a name="line.877"></a>
+<span class="sourceLineNo">878</span>    // block until a master is available.  No point in starting up if no master<a name="line.878"></a>
+<span class="sourceLineNo">879</span>    // running.<a name="line.879"></a>
+<span class="sourceLineNo">880</span>    blockAndCheckIfStopped(this.masterAddressTracker);<a name="line.880"></a>
+<span class="sourceLineNo">881</span><a name="line.881"></a>
+<span class="sourceLineNo">882</span>    // Wait on cluster being up.  Master will set this flag up in zookeeper<a name="line.882"></a>
+<span class="sourceLineNo">883</span>    // when ready.<a name="line.883"></a>
+<span class="sourceLineNo">884</span>    blockAndCheckIfStopped(this.clusterStatusTracker);<a name="line.884"></a>
+<span class="sourceLineNo">885</span><a name="line.885"></a>
+<span class="sourceLineNo">886</span>    // If we are HMaster then the cluster id should have already been set.<a name="line.886"></a>
+<span class="sourceLineNo">887</span>    if (clusterId == null) {<a name="line.887"></a>
+<span class="sourceLineNo">888</span>      // Retrieve clusterId<a name="line.888"></a>
+<span class="sourceLineNo">889</span>      // Since cluster status is now up<a name="line.889"></a>
+<span class="sourceLineNo">890</span>      // ID should have already been set by HMaster<a name="line.890"></a>
+<span class="sourceLineNo">891</span>      try {<a name="line.891"></a>
+<span class="sourceLineNo">892</span>        clusterId = ZKClusterId.readClusterIdZNode(this.zooKeeper);<a name="line.892"></a>
+<span class="sourceLineNo">893</span>        if (clusterId == null) {<a name="line.893"></a>
+<span class="sourceLineNo">894</span>          this.abort("Cluster ID has not been set");<a name="line.894"></a>
+<span class="sourceLineNo">895</span>        }<a name="line.895"></a>
+<span class="sourceLineNo">896</span>        LOG.info("ClusterId : " + clusterId);<a name="line.896"></a>
+<span class="sourceLineNo">897</span>      } catch (KeeperException e) {<a name="line.897"></a>
+<span class="sourceLineNo">898</span>        this.abort("Failed to retrieve Cluster ID", e);<a name="line.898"></a>
+<span class="sourceLineNo">899</span>      }<a name="line.899"></a>
 <span class="sourceLineNo">900</span>    }<a name="line.900"></a>
 <span class="sourceLineNo">901</span><a name="line.901"></a>
-<span class="sourceLineNo">902</span>    // watch for snapshots and other procedures<a name="line.902"></a>
-<span class="sourceLineNo">903</span>    try {<a name="line.903"></a>
-<span class="sourceLineNo">904</span>      rspmHost = new RegionServerProcedureManagerHost();<a name="line.904"></a>
-<span class="sourceLineNo">905</span>      rspmHost.loadProcedures(conf);<a name="line.905"></a>
-<span class="sourceLineNo">906</span>      rspmHost.initialize(this);<a name="line.906"></a>
-<span class="sourceLineNo">907</span>    } catch (KeeperException e) {<a name="line.907"></a>
-<span class="sourceLineNo">908</span>      this.abort("Failed to reach coordination cluster when creating procedure handler.", e);<a name="line.908"></a>
-<span class="sourceLineNo">909</span>    }<a name="line.909"></a>
-<span class="sourceLineNo">910</span>  }<a name="line.910"></a>
-<span class="sourceLineNo">911</span><a name="line.911"></a>
-<span class="sourceLineNo">912</span>  /**<a name="line.912"></a>
-<span class="sourceLineNo">913</span>   * Utilty method to wait indefinitely on a znode availability while checking<a name="line.913"></a>
-<span class="sourceLineNo">914</span>   * if the region server is shut down<a name="line.914"></a>
-<span class="sourceLineNo">915</span>   * @param tracker znode tracker to use<a name="line.915"></a>
-<span class="sourceLineNo">916</span>   * @throws IOException any IO exception, plus if the RS is stopped<a name="line.916"></a>
-<span class="sourceLineNo">917</span>   * @throws InterruptedException<a name="line.917"></a>
-<span class="sourceLineNo">918</span>   */<a name="line.918"></a>
-<span class="sourceLineNo">919</span>  private void blockAndCheckIfStopped(ZKNodeTracker tracker)<a name="line.919"></a>
-<span class="sourceLineNo">920</span>      throws IOException, InterruptedException {<a name="line.920"></a>
-<span class="sourceLineNo">921</span>    while (tracker.blockUntilAvailable(this.msgInterval, false) == null) {<a name="line.921"></a>
-<span class="sourceLineNo">922</span>      if (this.stopped) {<a name="line.922"></a>
-<span class="sourceLineNo">923</span>        throw new IOException("Received the shutdown message while waiting.");<a name="line.923"></a>
-<span class="sourceLineNo">924</span>      }<a name="line.924"></a>
-<span class="sourceLineNo">925</span>    }<a name="line.925"></a>
-<span class="sourceLineNo">926</span>  }<a name="line.926"></a>
-<span class="sourceLineNo">927</span><a name="line.927"></a>
-<span class="sourceLineNo">928</span>  /**<a name="line.928"></a>
-<span class="sourceLineNo">929</span>   * @return True if the cluster is up.<a name="line.929"></a>
-<span class="sourceLineNo">930</span>   */<a name="line.930"></a>
-<span class="sourceLineNo">931</span>  @Override<a name="line.931"></a>
-<span class="sourceLineNo">932</span>  public boolean isClusterUp() {<a name="line.932"></a>
-<span class="sourceLineNo">933</span>    return this.masterless ||<a name="line.933"></a>
-<span class="sourceLineNo">934</span>        (this.clusterStatusTracker != null &amp;&amp; this.clusterStatusTracker.isClusterUp());<a name="line.934"></a>
-<span class="sourceLineNo">935</span>  }<a name="line.935"></a>
-<span class="sourceLineNo">936</span><a name="line.936"></a>
-<span class="sourceLineNo">937</span>  /**<a name="line.937"></a>
-<span class="sourceLineNo">938</span>   * The HRegionServer sticks in this loop until closed.<a name="line.938"></a>
-<span class="sourceLineNo">939</span>   */<a name="line.939"></a>
-<span class="sourceLineNo">940</span>  @Override<a name="line.940"></a>
-<span class="sourceLineNo">941</span>  public void run() {<a name="line.941"></a>
-<span class="sourceLineNo">942</span>    try {<a name="line.942"></a>
-<span class="sourceLineNo">943</span>      // Do pre-registration initializations; zookeeper, lease threads, etc.<a name="line.943"></a>
-<span class="sourceLineNo">944</span>      preRegistrationInitialization();<a name="line.944"></a>
-<span class="sourceLineNo">945</span>    } catch (Throwable e) {<a name="line.945"></a>
-<span class="sourceLineNo">946</span>      abort("Fatal exception during initialization", e);<a name="line.946"></a>
-<span class="sourceLineNo">947</span>    }<a name="line.947"></a>
-<span class="sourceLineNo">948</span><a name="line.948"></a>
-<span class="sourceLineNo">949</span>    try {<a name="line.949"></a>
-<span class="sourceLineNo">950</span>      if (!isStopped() &amp;&amp; !isAborted()) {<a name="line.950"></a>
-<span class="sourceLineNo">951</span>        ShutdownHook.install(conf, fs, this, Thread.currentThread());<a name="line.951"></a>
-<span class="sourceLineNo">952</span>        // Initialize the RegionServerCoprocessorHost now that our ephemeral<a name="line.952"></a>
-<span class="sourceLineNo">953</span>        // node was created, in case any coprocessors want to use ZooKeeper<a name="line.953"></a>
-<span class="sourceLineNo">954</span>        this.rsHost = new RegionServerCoprocessorHost(this, this.conf);<a name="line.954"></a>
-<span class="sourceLineNo">955</span>      }<a name="line.955"></a>
-<span class="sourceLineNo">956</span><a name="line.956"></a>
-<span class="sourceLineNo">957</span>      // Try and register with the Master; tell it we are here.  Break if server is stopped or the<a name="line.957"></a>
-<span class="sourceLineNo">958</span>      // clusterup flag is down or hdfs went wacky. Once registered successfully, go ahead and start<a name="line.958"></a>
-<span class="sourceLineNo">959</span>      // up all Services. Use RetryCounter to get backoff in case Master is struggling to come up.<a name="line.959"></a>
-<span class="sourceLineNo">960</span>      LOG.debug("About to register with Master.");<a name="line.960"></a>
-<span class="sourceLineNo">961</span>      RetryCounterFactory rcf = new RetryCounterFactory(Integer.MAX_VALUE,<a name="line.961"></a>
-<span class="sourceLineNo">962</span>          this.sleeper.getPeriod(), 1000 * 60 * 5);<a name="line.962"></a>
-<span class="sourceLineNo">963</span>      RetryCounter rc = rcf.create();<a name="line.963"></a>
-<span class="sourceLineNo">964</span>      while (keepLooping()) {<a name="line.964"></a>
-<span class="sourceLineNo">965</span>        RegionServerStartupResponse w = reportForDuty();<a name="line.965"></a>
-<span class="sourceLineNo">966</span>        if (w == null) {<a name="line.966"></a>
-<span class="sourceLineNo">967</span>          long sleepTime = rc.getBackoffTimeAndIncrementAttempts();<a name="line.967"></a>
-<span class="sourceLineNo">968</span>          LOG.warn("reportForDuty failed; sleeping {} ms and then retrying.", sleepTime);<a name="line.968"></a>
-<span class="sourceLineNo">969</span>          this.sleeper.sleep(sleepTime);<a name="line.969"></a>
-<span class="sourceLineNo">970</span>        } else {<a name="line.970"></a>
-<span class="sourceLineNo">971</span>          handleReportForDutyResponse(w);<a name="line.971"></a>
-<span class="sourceLineNo">972</span>          break;<a name="line.972"></a>
-<span class="sourceLineNo">973</span>        }<a name="line.973"></a>
-<span class="sourceLineNo">974</span>      }<a name="line.974"></a>
-<span class="sourceLineNo">975</span><a name="line.975"></a>
-<span class="sourceLineNo">976</span>      if (!isStopped() &amp;&amp; isHealthy()) {<a name="line.976"></a>
-<span class="sourceLineNo">977</span>        // start the snapshot handler and other procedure handlers,<a name="line.977"></a>
-<span class="sourceLineNo">978</span>        // since the server is ready to run<a name="line.978"></a>
-<span class="sourceLineNo">979</span>        if (this.rspmHost != null) {<a name="line.979"></a>
-<span class="sourceLineNo">980</span>          this.rspmHost.start();<a name="line.980"></a>
-<span class="sourceLineNo">981</span>        }<a name="line.981"></a>
-<span class="sourceLineNo">982</span>        // Start the Quota Manager<a name="line.982"></a>
-<span class="sourceLineNo">983</span>        if (this.rsQuotaManager != null) {<a name="line.983"></a>
-<span class="sourceLineNo">984</span>          rsQuotaManager.start(getRpcServer().getScheduler());<a name="line.984"></a>
-<span class="sourceLineNo">985</span>        }<a name="line.985"></a>
-<span class="sourceLineNo">986</span>        if (this.rsSpaceQuotaManager != null) {<a name="line.986"></a>
-<span class="sourceLineNo">987</span>          this.rsSpaceQuotaManager.start();<a name="line.987"></a>
-<span class="sourceLineNo">988</span>        }<a name="line.988"></a>
-<span class="sourceLineNo">989</span>      }<a name="line.989"></a>
-<span class="sourceLineNo">990</span><a name="line.990"></a>
-<span class="sourceLineNo">991</span>      // We registered with the Master.  Go into run mode.<a name="line.991"></a>
-<span class="sourceLineNo">992</span>      long lastMsg = System.currentTimeMillis();<a name="line.992"></a>
-<span class="sourceLineNo">993</span>      long oldRequestCount = -1;<a name="line.993"></a>
-<span class="sourceLineNo">994</span>      // The main run loop.<a name="line.994"></a>
-<span class="sourceLineNo">995</span>      while (!isStopped() &amp;&amp; isHealthy()) {<a name="line.995"></a>
-<span class="sourceLineNo">996</span>        if (!isClusterUp()) {<a name="line.996"></a>
-<span class="sourceLineNo">997</span>          if (isOnlineRegionsEmpty()) {<a name="line.997"></a>
-<span class="sourceLineNo">998</span>            stop("Exiting; cluster shutdown set and not carrying any regions");<a name="line.998"></a>
-<span class="sourceLineNo">999</span>          } else if (!this.stopping) {<a name="line.999"></a>
-<span class="sourceLineNo">1000</span>            this.stopping = true;<a name="line.1000"></a>
-<span class="sourceLineNo">1001</span>            LOG.info("Closing user regions");<a name="line.1001"></a>
-<span class="sourceLineNo">1002</span>            closeUserRegions(this.abortRequested);<a name="line.1002"></a>
-<span class="sourceLineNo">1003</span>          } else if (this.stopping) {<a name="line.1003"></a>
-<span class="sourceLineNo">1004</span>            boolean allUserRegionsOffline = areAllUserRegionsOffline();<a name="line.1004"></a>
-<span class="sourceLineNo">1005</span>            if (allUserRegionsOffline) {<a name="line.1005"></a>
-<span class="sourceLineNo">1006</span>              // Set stopped if no more write requests tp meta tables<a name="line.1006"></a>
-<span class="sourceLineNo">1007</span>              // since last time we went around the loop.  Any open<a name="line.1007"></a>
-<span class="sourceLineNo">1008</span>              // meta regions will be closed on our way out.<a name="line.1008"></a>
-<span class="sourceLineNo">1009</span>              if (oldRequestCount == getWriteRequestCount()) {<a name="line.1009"></a>
-<span class="sourceLineNo">1010</span>                stop("Stopped; only catalog regions remaining online");<a name="line.1010"></a>
-<span class="sourceLineNo">1011</span>                break;<a name="line.1011"></a>
-<span class="sourceLineNo">1012</span>              }<a name="line.1012"></a>
-<span class="sourceLineNo">1013</span>              oldRequestCount = getWriteRequestCount();<a name="line.1013"></a>
-<span class="sourceLineNo">1014</span>            } else {<a name="line.1014"></a>
-<span class="sourceLineNo">1015</span>              // Make sure all regions have been closed -- some regions may<a name="line.1015"></a>
-<span class="sourceLineNo">1016</span>              // have not got it because we were splitting at the time of<a name="line.1016"></a>
-<span class="sourceLineNo">1017</span>              // the call to closeUserRegions.<a name="line.1017"></a>
-<span class="sourceLineNo">1018</span>              closeUserRegions(this.abortRequested);<a name="line.1018"></a>
-<span class="sourceLineNo">1019</span>            }<a name="line.1019"></a>
-<span class="sourceLineNo">1020</span>            LOG.debug("Waiting on " + getOnlineRegionsAsPrintableString());<a name="line.1020"></a>
-<span class="sourceLineNo">1021</span>          }<a name="line.1021"></a>
-<span class="sourceLineNo">1022</span>        }<a name="line.1022"></a>
-<span class="sourceLineNo">1023</span>        long now = System.currentTimeMillis();<a name="line.1023"></a>
-<span class="sourceLineNo">1024</span>        if ((now - lastMsg) &gt;= msgInterval) {<a name="line.1024"></a>
-<span class="sourceLineNo">1025</span>          tryRegionServerReport(lastMsg, now);<a name="line.1025"></a>
-<span class="sourceLineNo">1026</span>          lastMsg = System.currentTimeMillis();<a name="line.1026"></a>
+<span class="sourceLineNo">902</span>    waitForMasterActive();<a name="line.902"></a>
+<span class="sourceLineNo">903</span>    if (isStopped() || isAborted()) {<a name="line.903"></a>
+<span class="sourceLineNo">904</span>      return; // No need for further initialization<a name="line.904"></a>
+<span class="sourceLineNo">905</span>    }<a name="line.905"></a>
+<span class="sourceLineNo">906</span><a name="line.906"></a>
+<span class="sourceLineNo">907</span>    // watch for snapshots and other procedures<a name="line.907"></a>
+<span class="sourceLineNo">908</span>    try {<a name="line.908"></a>
+<span class="sourceLineNo">909</span>      rspmHost = new RegionServerProcedureManagerHost();<a name="line.909"></a>
+<span class="sourceLineNo">910</span>      rspmHost.loadProcedures(conf);<a name="line.910"></a>
+<span class="sourceLineNo">911</span>      rspmHost.initialize(this);<a name="line.911"></a>
+<span class="sourceLineNo">912</span>    } catch (KeeperException e) {<a name="line.912"></a>
+<span class="sourceLineNo">913</span>      this.abort("Failed to reach coordination cluster when creating procedure handler.", e);<a name="line.913"></a>
+<span class="sourceLineNo">914</span>    }<a name="line.914"></a>
+<span class="sourceLineNo">915</span>  }<a name="line.915"></a>
+<span class="sourceLineNo">916</span><a name="line.916"></a>
+<span class="sourceLineNo">917</span>  /**<a name="line.917"></a>
+<span class="sourceLineNo">918</span>   * Utilty method to wait indefinitely on a znode availability while checking<a name="line.918"></a>
+<span class="sourceLineNo">919</span>   * if the region server is shut down<a name="line.919"></a>
+<span class="sourceLineNo">920</span>   * @param tracker znode tracker to use<a name="line.920"></a>
+<span class="sourceLineNo">921</span>   * @throws IOException any IO exception, plus if the RS is stopped<a name="line.921"></a>
+<span class="sourceLineNo">922</span>   * @throws InterruptedException<a name="line.922"></a>
+<span class="sourceLineNo">923</span>   */<a name="line.923"></a>
+<span class="sourceLineNo">924</span>  private void blockAndCheckIfStopped(ZKNodeTracker tracker)<a name="line.924"></a>
+<span class="sourceLineNo">925</span>      throws IOException, InterruptedException {<a name="line.925"></a>
+<span class="sourceLineNo">926</span>    while (tracker.blockUntilAvailable(this.msgInterval, false) == null) {<a name="line.926"></a>
+<span class="sourceLineNo">927</span>      if (this.stopped) {<a name="line.927"></a>
+<span class="sourceLineNo">928</span>        throw new IOException("Received the shutdown message while waiting.");<a name="line.928"></a>
+<span class="sourceLineNo">929</span>      }<a name="line.929"></a>
+<span class="sourceLineNo">930</span>    }<a name="line.930"></a>
+<span class="sourceLineNo">931</span>  }<a name="line.931"></a>
+<span class="sourceLineNo">932</span><a name="line.932"></a>
+<span class="sourceLineNo">933</span>  /**<a name="line.933"></a>
+<span class="sourceLineNo">934</span>   * @return True if the cluster is up.<a name="line.934"></a>
+<span class="sourceLineNo">935</span>   */<a name="line.935"></a>
+<span class="sourceLineNo">936</span>  @Override<a name="line.936"></a>
+<span class="sourceLineNo">937</span>  public boolean isClusterUp() {<a name="line.937"></a>
+<span class="sourceLineNo">938</span>    return this.masterless ||<a name="line.938"></a>
+<span class="sourceLineNo">939</span>        (this.clusterStatusTracker != null &amp;&amp; this.clusterStatusTracker.isClusterUp());<a name="line.939"></a>
+<span class="sourceLineNo">940</span>  }<a name="line.940"></a>
+<span class="sourceLineNo">941</span><a name="line.941"></a>
+<span class="sourceLineNo">942</span>  /**<a name="line.942"></a>
+<span class="sourceLineNo">943</span>   * The HRegionServer sticks in this loop until closed.<a name="line.943"></a>
+<span class="sourceLineNo">944</span>   */<a name="line.944"></a>
+<span class="sourceLineNo">945</span>  @Override<a name="line.945"></a>
+<span class="sourceLineNo">946</span>  public void run() {<a name="line.946"></a>
+<span class="sourceLineNo">947</span>    try {<a name="line.947"></a>
+<span class="sourceLineNo">948</span>      // Do pre-registration initializations; zookeeper, lease threads, etc.<a name="line.948"></a>
+<span class="sourceLineNo">949</span>      preRegistrationInitialization();<a name="line.949"></a>
+<span class="sourceLineNo">950</span>    } catch (Throwable e) {<a name="line.950"></a>
+<span class="sourceLineNo">951</span>      abort("Fatal exception during initialization", e);<a name="line.951"></a>
+<span class="sourceLineNo">952</span>    }<a name="line.952"></a>
+<span class="sourceLineNo">953</span><a name="line.953"></a>
+<span class="sourceLineNo">954</span>    try {<a name="line.954"></a>
+<span class="sourceLineNo">955</span>      if (!isStopped() &amp;&amp; !isAborted()) {<a name="line.955"></a>
+<span class="sourceLineNo">956</span>        ShutdownHook.install(conf, fs, this, Thread.currentThread());<a name="line.956"></a>
+<span class="sourceLineNo">957</span>        // Initialize the RegionServerCoprocessorHost now that our ephemeral<a name="line.957"></a>
+<span class="sourceLineNo">958</span>        // node was created, in case any coprocessors want to use ZooKeeper<a name="line.958"></a>
+<span class="sourceLineNo">959</span>        this.rsHost = new RegionServerCoprocessorHost(this, this.conf);<a name="line.959"></a>
+<span class="sourceLineNo">960</span>      }<a name="line.960"></a>
+<span class="sourceLineNo">961</span><a name="line.961"></a>
+<span class="sourceLineNo">962</span>      // Try and register with the Master; tell it we are here.  Break if server is stopped or the<a name="line.962"></a>
+<span class="sourceLineNo">963</span>      // clusterup flag is down or hdfs went wacky. Once registered successfully, go ahead and start<a name="line.963"></a>
+<span class="sourceLineNo">964</span>      // up all Services. Use RetryCounter to get backoff in case Master is struggling to come up.<a name="line.964"></a>
+<span class="sourceLineNo">965</span>      LOG.debug("About to register with Master.");<a name="line.965"></a>
+<span class="sourceLineNo">966</span>      RetryCounterFactory rcf = new RetryCounterFactory(Integer.MAX_VALUE,<a name="line.966"></a>
+<span class="sourceLineNo">967</span>          this.sleeper.getPeriod(), 1000 * 60 * 5);<a name="line.967"></a>
+<span class="sourceLineNo">968</span>      RetryCounter rc = rcf.create();<a name="line.968"></a>
+<span class="sourceLineNo">969</span>      while (keepLooping()) {<a name="line.969"></a>
+<span class="sourceLineNo">970</span>        RegionServerStartupResponse w = reportForDuty();<a name="line.970"></a>
+<span class="sourceLineNo">971</span>        if (w == null) {<a name="line.971"></a>
+<span class="sourceLineNo">972</span>          long sleepTime = rc.getBackoffTimeAndIncrementAttempts();<a name="line.972"></a>
+<span class="sourceLineNo">973</span>          LOG.warn("reportForDuty failed; sleeping {} ms and then retrying.", sleepTime);<a name="line.973"></a>
+<span class="sourceLineNo">974</span>          this.sleeper.sleep(sleepTime);<a name="line.974"></a>
+<span class="sourceLineNo">975</span>        } else {<a name="line.975"></a>
+<span class="sourceLineNo">976</span>          handleReportForDutyResponse(w);<a name="line.976"></a>
+<span class="sourceLineNo">977</span>          break;<a name="line.977"></a>
+<span class="sourceLineNo">978</span>        }<a name="line.978"></a>
+<span class="sourceLineNo">979</span>      }<a name="line.979"></a>
+<span class="sourceLineNo">980</span><a name="line.980"></a>
+<span class="sourceLineNo">981</span>      if (!isStopped() &amp;&amp; isHealthy()) {<a name="line.981"></a>
+<span class="sourceLineNo">982</span>        // start the snapshot handler and other procedure handlers,<a name="line.982"></a>
+<span class="sourceLineNo">983</span>        // since the server is ready to run<a name="line.983"></a>
+<span class="sourceLineNo">984</span>        if (this.rspmHost != null) {<a name="line.984"></a>
+<span class="sourceLineNo">985</span>          this.rspmHost.start();<a name="line.985"></a>
+<span class="sourceLineNo">986</span>        }<a name="line.986"></a>
+<span class="sourceLineNo">987</span>        // Start the Quota Manager<a name="line.987"></a>
+<span class="sourceLineNo">988</span>        if (this.rsQuotaManager != null) {<a name="line.988"></a>
+<span class="sourceLineNo">989</span>          rsQuotaManager.start(getRpcServer().getScheduler());<a name="line.989"></a>
+<span class="sourceLineNo">990</span>        }<a name="line.990"></a>
+<span class="sourceLineNo">991</span>        if (this.rsSpaceQuotaManager != null) {<a name="line.991"></a>
+<span class="sourceLineNo">992</span>          this.rsSpaceQuotaManager.start();<a name="line.992"></a>
+<span class="sourceLineNo">993</span>        }<a name="line.993"></a>
+<span class="sourceLineNo">994</span>      }<a name="line.994"></a>
+<span class="sourceLineNo">995</span><a name="line.995"></a>
+<span class="sourceLineNo">996</span>      // We registered with the Master.  Go into run mode.<a name="line.996"></a>
+<span class="sourceLineNo">997</span>      long lastMsg = System.currentTimeMillis();<a name="line.997"></a>
+<span class="sourceLineNo">998</span>      long oldRequestCount = -1;<a name="line.998"></a>
+<span class="sourceLineNo">999</span>      // The main run loop.<a name="line.999"></a>
+<span class="sourceLineNo">1000</span>      while (!isStopped() &amp;&amp; isHealthy()) {<a name="line.1000"></a>
+<span class="sourceLineNo">1001</span>        if (!isClusterUp()) {<a name="line.1001"></a>
+<span class="sourceLineNo">1002</span>          if (isOnlineRegionsEmpty()) {<a name="line.1002"></a>
+<span class="sourceLineNo">1003</span>            stop("Exiting; cluster shutdown set and not carrying any regions");<a name="line.1003"></a>
+<span class="sourceLineNo">1004</span>          } else if (!this.stopping) {<a name="line.1004"></a>
+<span class="sourceLineNo">1005</span>            this.stopping = true;<a name="line.1005"></a>
+<span class="sourceLineNo">1006</span>            LOG.info("Closing user regions");<a name="line.1006"></a>
+<span class="sourceLineNo">1007</span>            closeUserRegions(this.abortRequested);<a name="line.1007"></a>
+<span class="sourceLineNo">1008</span>          } else if (this.stopping) {<a name="line.1008"></a>
+<span class="sourceLineNo">1009</span>            boolean allUserRegionsOffline = areAllUserRegionsOffline();<a name="line.1009"></a>
+<span class="sourceLineNo">1010</span>            if (allUserRegionsOffline) {<a name="line.1010"></a>
+<span class="sourceLineNo">1011</span>              // Set stopped if no more write requests tp meta tables<a name="line.1011"></a>
+<span class="sourceLineNo">1012</span>              // since last time we went around the loop.  Any open<a name="line.1012"></a>
+<span class="sourceLineNo">1013</span>              // meta regions will be closed on our way out.<a name="line.1013"></a>
+<span class="sourceLineNo">1014</span>              if (oldRequestCount == getWriteRequestCount()) {<a name="line.1014"></a>
+<span class="sourceLineNo">1015</span>                stop("Stopped; only catalog regions remaining online");<a name="line.1015"></a>
+<span class="sourceLineNo">1016</span>                break;<a name="line.1016"></a>
+<span class="sourceLineNo">1017</span>              }<a name="line.1017"></a>
+<span class="sourceLineNo">1018</span>              oldRequestCount = getWriteRequestCount();<a name="line.1018"></a>
+<span class="sourceLineNo">1019</span>            } else {<a name="line.1019"></a>
+<span class="sourceLineNo">1020</span>              // Make sure all regions have been closed -- some regions may<a name="line.1020"></a>
+<span class="sourceLineNo">1021</span>              // have not got it because we were splitting at the time of<a name="line.1021"></a>
+<span class="sourceLineNo">1022</span>              // the call to closeUserRegions.<a name="line.1022"></a>
+<span class="sourceLineNo">1023</span>              closeUserRegions(this.abortRequested);<a name="line.1023"></a>
+<span class="sourceLineNo">1024</span>            }<a name="line.1024"></a>
+<span class="sourceLineNo">1025</span>            LOG.debug("Waiting on " + getOnlineRegionsAsPrintableString());<a name="line.1025"></a>
+<span class="sourceLineNo">1026</span>          }<a name="line.1026"></a>
 <span class="sourceLineNo">1027</span>        }<a name="line.1027"></a>
-<span class="sourceLineNo">1028</span>        if (!isStopped() &amp;&amp; !isAborted()) {<a name="line.1028"></a>
-<span class="sourceLineNo">1029</span>          this.sleeper.sleep();<a name="line.1029"></a>
-<span class="sourceLineNo">1030</span>        }<a name="line.1030"></a>
-<span class="sourceLineNo">1031</span>      } // for<a name="line.1031"></a>
-<span class="sourceLineNo">1032</span>    } catch (Throwable t) {<a name="line.1032"></a>
-<span class="sourceLineNo">1033</span>      if (!rpcServices.checkOOME(t)) {<a name="line.1033"></a>
-<span class="sourceLineNo">1034</span>        String prefix = t instanceof YouAreDeadException? "": "Unhandled: ";<a name="line.1034"></a>
-<span class="sourceLineNo">1035</span>        abort(prefix + t.getMessage(), t);<a name="line.1035"></a>
-<span class="sourceLineNo">1036</span>      }<a name="line.1036"></a>
-<span class="sourceLineNo">1037</span>    }<a name="line.1037"></a>
-<span class="sourceLineNo">1038</span><a name="line.1038"></a>
-<span class="sourceLineNo">1039</span>    if (abortRequested) {<a name="line.1039"></a>
-<span class="sourceLineNo">1040</span>      Timer abortMonitor = new Timer("Abort regionserver monitor", true);<a name="line.1040"></a>
-<span class="sourceLineNo">1041</span>      TimerTask abortTimeoutTask = null;<a name="line.1041"></a>
-<span class="sourceLineNo">1042</span>      try {<a name="line.1042"></a>
-<span class="sourceLineNo">1043</span>        abortTimeoutTask =<a name="line.1043"></a>
-<span class="sourceLineNo">1044</span>            Class.forName(conf.get(ABORT_TIMEOUT_TASK, SystemExitWhenAbortTimeout.class.getName()))<a name="line.1044"></a>
-<span class="sourceLineNo">1045</span>                .asSubclass(TimerTask.class).getDeclaredConstructor().newInstance();<a name="line.1045"></a>
-<span class="sourceLineNo">1046</span>      } catch (Exception e) {<a name="line.1046"></a>
-<span class="sourceLineNo">1047</span>        LOG.warn("Initialize abort timeout task failed", e);<a name="line.1047"></a>
-<span class="sourceLineNo">1048</span>      }<a name="line.1048"></a>
-<span class="sourceLineNo">1049</span>      if (abortTimeoutTask != null) {<a name="l

<TRUNCATED>
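
Just before the listing is cut off, the abort path instantiates a configurable TimerTask by class name, falling back to SystemExitWhenAbortTimeout. That reflective lookup is a reusable pattern; here is a minimal, self-contained sketch (the config key and default passed in below are placeholders, not the exact HBase constants).

    import java.util.TimerTask;
    import org.apache.hadoop.conf.Configuration;

    final class AbortTaskLoaderSketch {
      /** Resolve a TimerTask implementation from configuration, defaulting to 'defaultClass'. */
      static TimerTask load(Configuration conf, String key, Class<? extends TimerTask> defaultClass)
          throws Exception {
        String className = conf.get(key, defaultClass.getName());
        return Class.forName(className)
            .asSubclass(TimerTask.class)
            .getDeclaredConstructor()
            .newInstance();
      }
    }
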

[24/26] hbase-site git commit: Published site at 6f15cecaed2f1f76bfe1880b7c578ed369daa5d5.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/checkstyle.rss
----------------------------------------------------------------------
diff --git a/checkstyle.rss b/checkstyle.rss
index a0f1fd1..777e301 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -26,7 +26,7 @@ under the License.
     <copyright>&#169;2007 - 2018 The Apache Software Foundation</copyright>
     <item>
       <title>File: 3815,
-             Errors: 14837,
+             Errors: 14796,
              Warnings: 0,
              Infos: 0
       </title>
@@ -3107,7 +3107,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  9
+                  6
                 </td>
               </tr>
                           <tr>
@@ -18885,7 +18885,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  2
+                  0
                 </td>
               </tr>
                           <tr>
@@ -43875,7 +43875,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  5
+                  2
                 </td>
               </tr>
                           <tr>
@@ -48285,7 +48285,7 @@ under the License.
                   0
                 </td>
                 <td>
-                  41
+                  8
                 </td>
               </tr>
                           <tr>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/coc.html
----------------------------------------------------------------------
diff --git a/coc.html b/coc.html
index 3a38d31..b5b32df 100644
--- a/coc.html
+++ b/coc.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181126" />
+    <meta name="Date-Revision-yyyymmdd" content="20181127" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; 
       Code of Conduct Policy
@@ -385,7 +385,7 @@ email to <a class="externalLink" href="mailto:private@hbase.apache.org">the priv
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-11-26</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-11-27</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/dependencies.html
----------------------------------------------------------------------
diff --git a/dependencies.html b/dependencies.html
index dc5f088..0d0958c 100644
--- a/dependencies.html
+++ b/dependencies.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181126" />
+    <meta name="Date-Revision-yyyymmdd" content="20181127" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Dependencies</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -450,7 +450,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-11-26</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-11-27</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/dependency-convergence.html
----------------------------------------------------------------------
diff --git a/dependency-convergence.html b/dependency-convergence.html
index c8bdc91..456c766 100644
--- a/dependency-convergence.html
+++ b/dependency-convergence.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181126" />
+    <meta name="Date-Revision-yyyymmdd" content="20181127" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Reactor Dependency Convergence</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -680,7 +680,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-11-26</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-11-27</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/dependency-info.html
----------------------------------------------------------------------
diff --git a/dependency-info.html b/dependency-info.html
index 421412f..e1e3b7f 100644
--- a/dependency-info.html
+++ b/dependency-info.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181126" />
+    <meta name="Date-Revision-yyyymmdd" content="20181127" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Dependency Information</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -323,7 +323,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-11-26</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-11-27</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/dependency-management.html
----------------------------------------------------------------------
diff --git a/dependency-management.html b/dependency-management.html
index 434bdef..276b204 100644
--- a/dependency-management.html
+++ b/dependency-management.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181126" />
+    <meta name="Date-Revision-yyyymmdd" content="20181127" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013; Project Dependency Management</title>
     <link rel="stylesheet" href="./css/apache-maven-fluido-1.4-HBase.min.css" />
@@ -1009,7 +1009,7 @@
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-11-26</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-11-27</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/constant-values.html
----------------------------------------------------------------------
diff --git a/devapidocs/constant-values.html b/devapidocs/constant-values.html
index fffed85..08fc418 100644
--- a/devapidocs/constant-values.html
+++ b/devapidocs/constant-values.html
@@ -3831,7 +3831,7 @@
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/Version.html#date">date</a></code></td>
-<td class="colLast"><code>"Mon Nov 26 14:44:10 UTC 2018"</code></td>
+<td class="colLast"><code>"Tue Nov 27 14:43:49 UTC 2018"</code></td>
 </tr>
 <tr class="rowColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.Version.revision">
@@ -3845,7 +3845,7 @@
 <!--   -->
 </a><code>public&nbsp;static&nbsp;final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a></code></td>
 <td><code><a href="org/apache/hadoop/hbase/Version.html#srcChecksum">srcChecksum</a></code></td>
-<td class="colLast"><code>"4d3ee036754273b7c20a302784193204"</code></td>
+<td class="colLast"><code>"17dd2a5425ebe5b21808fd50b5751598"</code></td>
 </tr>
 <tr class="rowColor">
 <td class="colFirst"><a name="org.apache.hadoop.hbase.Version.url">

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/index-all.html
----------------------------------------------------------------------
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index 09e884f..e2a2eb2 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -8231,10 +8231,6 @@
 <div class="block">Create a cache configuration using the specified configuration object and
  defaults for family level settings.</div>
 </dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/CacheConfig.html#CacheConfig-org.apache.hadoop.conf.Configuration-boolean-">CacheConfig(Configuration, boolean)</a></span> - Constructor for class org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a></dt>
-<dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/CacheConfig.html#CacheConfig-org.apache.hadoop.conf.Configuration-boolean-boolean-boolean-boolean-boolean-boolean-boolean-boolean-boolean-boolean-">CacheConfig(Configuration, boolean, boolean, boolean, boolean, boolean, boolean, boolean, boolean, boolean, boolean)</a></span> - Constructor for class org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a></dt>
-<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/io/hfile/CacheConfig.html#CacheConfig-org.apache.hadoop.hbase.io.hfile.BlockCache-boolean-boolean-boolean-boolean-boolean-boolean-boolean-boolean-boolean-">CacheConfig(BlockCache, boolean, boolean, boolean, boolean, boolean, boolean, boolean, boolean, boolean)</a></span> - Constructor for class org.apache.hadoop.hbase.io.hfile.<a href="org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a></dt>
 <dd>
 <div class="block">Create a block cache configuration with the specified cache and configuration parameters.</div>
@@ -75960,8 +75956,6 @@
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/mob/MobCacheConfig.html#MobCacheConfig-org.apache.hadoop.conf.Configuration-">MobCacheConfig(Configuration)</a></span> - Constructor for class org.apache.hadoop.hbase.mob.<a href="org/apache/hadoop/hbase/mob/MobCacheConfig.html" title="class in org.apache.hadoop.hbase.mob">MobCacheConfig</a></dt>
 <dd>&nbsp;</dd>
-<dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/mob/MobCacheConfig.html#MobCacheConfig-org.apache.hadoop.conf.Configuration-boolean-">MobCacheConfig(Configuration, boolean)</a></span> - Constructor for class org.apache.hadoop.hbase.mob.<a href="org/apache/hadoop/hbase/mob/MobCacheConfig.html" title="class in org.apache.hadoop.hbase.mob">MobCacheConfig</a></dt>
-<dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/HMobStore.html#mobCacheConfig">mobCacheConfig</a></span> - Variable in class org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/HMobStore.html" title="class in org.apache.hadoop.hbase.regionserver">HMobStore</a></dt>
 <dd>&nbsp;</dd>
 <dt><span class="memberNameLink"><a href="org/apache/hadoop/hbase/regionserver/HRegionServer.html#mobCacheConfig">mobCacheConfig</a></span> - Variable in class org.apache.hadoop.hbase.regionserver.<a href="org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a></dt>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html b/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
index 38f7cc7..155a44c 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
@@ -169,8 +169,8 @@
 <ul>
 <li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupType.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupRestoreConstants.BackupCommand.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupRestoreConstants.BackupCommand</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupInfo.BackupPhase.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupInfo.BackupPhase</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupInfo.BackupState.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupInfo.BackupState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.backup.<a href="../../../../../org/apache/hadoop/hbase/backup/BackupInfo.BackupPhase.html" title="enum in org.apache.hadoop.hbase.backup"><span class="typeNameLink">BackupInfo.BackupPhase</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
index a9f1b7a..d5c9cbe 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
@@ -555,24 +555,24 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.Retry.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncRequestFutureImpl.Retry</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/IsolationLevel.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">IsolationLevel</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/CompactionState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">CompactionState</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncProcessTask.SubmittedRows.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncProcessTask.SubmittedRows</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">SnapshotType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/MasterSwitchType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">MasterSwitchType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/TableState.State.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">TableState.State</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RequestController.ReturnCode.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RequestController.ReturnCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/SnapshotType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">SnapshotType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncProcessTask.SubmittedRows.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncProcessTask.SubmittedRows</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AbstractResponse.ResponseType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AbstractResponse.ResponseType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Scan.ReadType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Scan.ReadType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RequestController.ReturnCode.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RequestController.ReturnCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/CompactionState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">CompactionState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/IsolationLevel.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">IsolationLevel</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.Retry.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncRequestFutureImpl.Retry</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RegionLocateType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RegionLocateType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Durability.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Durability</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/CompactType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">CompactType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/MobCompactPartitionPolicy.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">MobCompactPartitionPolicy</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Scan.ReadType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Scan.ReadType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/ScannerCallable.MoreResults.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">ScannerCallable.MoreResults</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/RegionLocateType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">RegionLocateType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/Consistency.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">Consistency</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/MasterSwitchType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">MasterSwitchType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.client.<a href="../../../../../org/apache/hadoop/hbase/client/CompactType.html" title="enum in org.apache.hadoop.hbase.client"><span class="typeNameLink">CompactType</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/org/apache/hadoop/hbase/coprocessor/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/coprocessor/package-tree.html b/devapidocs/org/apache/hadoop/hbase/coprocessor/package-tree.html
index e67b778..329f62f 100644
--- a/devapidocs/org/apache/hadoop/hbase/coprocessor/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/coprocessor/package-tree.html
@@ -201,8 +201,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.coprocessor.<a href="../../../../../org/apache/hadoop/hbase/coprocessor/MetaTableMetrics.MetaTableOps.html" title="enum in org.apache.hadoop.hbase.coprocessor"><span class="typeNameLink">MetaTableMetrics.MetaTableOps</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.coprocessor.<a href="../../../../../org/apache/hadoop/hbase/coprocessor/RegionObserver.MutationType.html" title="enum in org.apache.hadoop.hbase.coprocessor"><span class="typeNameLink">RegionObserver.MutationType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.coprocessor.<a href="../../../../../org/apache/hadoop/hbase/coprocessor/MetaTableMetrics.MetaTableOps.html" title="enum in org.apache.hadoop.hbase.coprocessor"><span class="typeNameLink">MetaTableMetrics.MetaTableOps</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
index c859e49..b4eee18 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
@@ -183,14 +183,14 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.SatisfiesCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FuzzyRowFilter.SatisfiesCode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/Filter.ReturnCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">Filter.ReturnCode</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FilterList.Operator.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FilterList.Operator</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.Order.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FuzzyRowFilter.Order</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">CompareFilter.CompareOp</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/RegexStringComparator.EngineType.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">RegexStringComparator.EngineType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/BitComparator.BitwiseOp.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">BitComparator.BitwiseOp</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.Order.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FuzzyRowFilter.Order</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FilterWrapper.FilterRowRetCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FilterWrapper.FilterRowRetCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/BitComparator.BitwiseOp.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">BitComparator.BitwiseOp</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/FuzzyRowFilter.SatisfiesCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">FuzzyRowFilter.SatisfiesCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/Filter.ReturnCode.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">Filter.ReturnCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.filter.<a href="../../../../../org/apache/hadoop/hbase/filter/CompareFilter.CompareOp.html" title="enum in org.apache.hadoop.hbase.filter"><span class="typeNameLink">CompareFilter.CompareOp</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheConfig.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheConfig.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheConfig.html
index 804e520..e249ca2 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheConfig.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/CacheConfig.html
@@ -429,25 +429,6 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 </tr>
 <tr class="altColor">
 <td class="colFirst"><code>&nbsp;</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#CacheConfig-org.apache.hadoop.conf.Configuration-boolean-">CacheConfig</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf,
-           boolean&nbsp;enableBlockCache)</code>&nbsp;</td>
-</tr>
-<tr class="rowColor">
-<td class="colFirst"><code>private </code></td>
-<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#CacheConfig-org.apache.hadoop.conf.Configuration-boolean-boolean-boolean-boolean-boolean-boolean-boolean-boolean-boolean-boolean-">CacheConfig</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf,
-           boolean&nbsp;enableBlockCache,
-           boolean&nbsp;cacheDataOnRead,
-           boolean&nbsp;inMemory,
-           boolean&nbsp;cacheDataOnWrite,
-           boolean&nbsp;cacheIndexesOnWrite,
-           boolean&nbsp;cacheBloomsOnWrite,
-           boolean&nbsp;evictOnClose,
-           boolean&nbsp;cacheDataCompressed,
-           boolean&nbsp;prefetchOnOpen,
-           boolean&nbsp;dropBehindCompaction)</code>&nbsp;</td>
-</tr>
-<tr class="altColor">
-<td class="colFirst"><code>&nbsp;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html#CacheConfig-org.apache.hadoop.conf.Configuration-org.apache.hadoop.hbase.client.ColumnFamilyDescriptor-">CacheConfig</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf,
            <a href="../../../../../../org/apache/hadoop/hbase/client/ColumnFamilyDescriptor.html" title="interface in org.apache.hadoop.hbase.client">ColumnFamilyDescriptor</a>&nbsp;family)</code>
 <div class="block">Create a cache configuration using the specified configuration object and
@@ -1121,7 +1102,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>GLOBAL_BLOCK_CACHE_INSTANCE</h4>
-<pre>static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.531">GLOBAL_BLOCK_CACHE_INSTANCE</a></pre>
+<pre>static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.505">GLOBAL_BLOCK_CACHE_INSTANCE</a></pre>
 <div class="block">Static reference to the block cache, or null if no caching should be used
  at all.</div>
 </li>
@@ -1132,7 +1113,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>ONHEAP_CACHE_INSTANCE</h4>
-<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/LruBlockCache.html" title="class in org.apache.hadoop.hbase.io.hfile">LruBlockCache</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.532">ONHEAP_CACHE_INSTANCE</a></pre>
+<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/LruBlockCache.html" title="class in org.apache.hadoop.hbase.io.hfile">LruBlockCache</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.506">ONHEAP_CACHE_INSTANCE</a></pre>
 </li>
 </ul>
 <a name="L2_CACHE_INSTANCE">
@@ -1141,7 +1122,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>L2_CACHE_INSTANCE</h4>
-<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.533">L2_CACHE_INSTANCE</a></pre>
+<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a> <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.507">L2_CACHE_INSTANCE</a></pre>
 </li>
 </ul>
 <a name="blockCacheDisabled">
@@ -1150,7 +1131,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>blockCacheDisabled</h4>
-<pre>static&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.537">blockCacheDisabled</a></pre>
+<pre>static&nbsp;boolean <a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.511">blockCacheDisabled</a></pre>
 <div class="block">Boolean whether we have disabled the block cache entirely.</div>
 </li>
 </ul>
@@ -1197,42 +1178,13 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 </dl>
 </li>
 </ul>
-<a name="CacheConfig-org.apache.hadoop.conf.Configuration-boolean-">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>CacheConfig</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.251">CacheConfig</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
-                   boolean&nbsp;enableBlockCache)</pre>
-</li>
-</ul>
-<a name="CacheConfig-org.apache.hadoop.conf.Configuration-boolean-boolean-boolean-boolean-boolean-boolean-boolean-boolean-boolean-boolean-">
-<!--   -->
-</a>
-<ul class="blockList">
-<li class="blockList">
-<h4>CacheConfig</h4>
-<pre>private&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.266">CacheConfig</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
-                    boolean&nbsp;enableBlockCache,
-                    boolean&nbsp;cacheDataOnRead,
-                    boolean&nbsp;inMemory,
-                    boolean&nbsp;cacheDataOnWrite,
-                    boolean&nbsp;cacheIndexesOnWrite,
-                    boolean&nbsp;cacheBloomsOnWrite,
-                    boolean&nbsp;evictOnClose,
-                    boolean&nbsp;cacheDataCompressed,
-                    boolean&nbsp;prefetchOnOpen,
-                    boolean&nbsp;dropBehindCompaction)</pre>
-</li>
-</ul>
 <a name="CacheConfig-org.apache.hadoop.hbase.io.hfile.BlockCache-boolean-boolean-boolean-boolean-boolean-boolean-boolean-boolean-boolean-">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
 <h4>CacheConfig</h4>
-<pre><a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.304">CacheConfig</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a>&nbsp;blockCache,
+<pre><a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.278">CacheConfig</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a>&nbsp;blockCache,
             boolean&nbsp;cacheDataOnRead,
             boolean&nbsp;inMemory,
             boolean&nbsp;cacheDataOnWrite,
@@ -1266,7 +1218,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>CacheConfig</h4>
-<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.326">CacheConfig</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a>&nbsp;cacheConf)</pre>
+<pre>public&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.300">CacheConfig</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheConfig</a>&nbsp;cacheConf)</pre>
 <div class="block">Constructs a cache configuration copied from the specified configuration.</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -1280,7 +1232,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>CacheConfig</h4>
-<pre>private&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.334">CacheConfig</a>()</pre>
+<pre>private&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.308">CacheConfig</a>()</pre>
 </li>
 </ul>
 </li>
@@ -1297,7 +1249,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>isBlockCacheEnabled</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.341">isBlockCacheEnabled</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.315">isBlockCacheEnabled</a>()</pre>
 <div class="block">Checks whether the block cache is enabled.</div>
 </li>
 </ul>
@@ -1307,7 +1259,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getBlockCache</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.349">getBlockCache</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.323">getBlockCache</a>()</pre>
 <div class="block">Returns the block cache.</div>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
@@ -1321,7 +1273,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldCacheDataOnRead</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.358">shouldCacheDataOnRead</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.332">shouldCacheDataOnRead</a>()</pre>
 <div class="block">Returns whether the DATA blocks of this HFile should be cached on read or not (we always
  cache the meta blocks, the INDEX and BLOOM blocks).</div>
 <dl>
@@ -1336,7 +1288,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldDropBehindCompaction</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.362">shouldDropBehindCompaction</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.336">shouldDropBehindCompaction</a>()</pre>
 </li>
 </ul>
 <a name="shouldCacheBlockOnRead-org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory-">
@@ -1345,7 +1297,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldCacheBlockOnRead</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.371">shouldCacheBlockOnRead</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.BlockCategory.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType.BlockCategory</a>&nbsp;category)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.345">shouldCacheBlockOnRead</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.BlockCategory.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType.BlockCategory</a>&nbsp;category)</pre>
 <div class="block">Should we cache a block of a particular category? We always cache
  important blocks such as index blocks, as long as the block cache is
  available.</div>
@@ -1357,7 +1309,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>isInMemory</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.384">isInMemory</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.358">isInMemory</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>true if blocks in this file should be flagged as in-memory</dd>
@@ -1370,7 +1322,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldCacheDataOnWrite</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.392">shouldCacheDataOnWrite</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.366">shouldCacheDataOnWrite</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>true if data blocks should be written to the cache when an HFile is
@@ -1384,7 +1336,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>setCacheDataOnWrite</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.402">setCacheDataOnWrite</a>(boolean&nbsp;cacheDataOnWrite)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.376">setCacheDataOnWrite</a>(boolean&nbsp;cacheDataOnWrite)</pre>
 <div class="block">Only used for testing.</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -1399,7 +1351,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldCacheIndexesOnWrite</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.410">shouldCacheIndexesOnWrite</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.384">shouldCacheIndexesOnWrite</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>true if index blocks should be written to the cache when an HFile
@@ -1413,7 +1365,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldCacheBloomsOnWrite</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.418">shouldCacheBloomsOnWrite</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.392">shouldCacheBloomsOnWrite</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>true if bloom blocks should be written to the cache when an HFile
@@ -1427,7 +1379,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldEvictOnClose</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.426">shouldEvictOnClose</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.400">shouldEvictOnClose</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>true if blocks should be evicted from the cache when an HFile
@@ -1441,7 +1393,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>setEvictOnClose</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.435">setEvictOnClose</a>(boolean&nbsp;evictOnClose)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.409">setEvictOnClose</a>(boolean&nbsp;evictOnClose)</pre>
 <div class="block">Only used for testing.</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -1456,7 +1408,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldCacheDataCompressed</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.442">shouldCacheDataCompressed</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.416">shouldCacheDataCompressed</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>true if data blocks should be compressed in the cache, false if not</dd>
@@ -1469,7 +1421,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldCacheCompressed</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.449">shouldCacheCompressed</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.BlockCategory.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType.BlockCategory</a>&nbsp;category)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.423">shouldCacheCompressed</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.BlockCategory.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType.BlockCategory</a>&nbsp;category)</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>true if this <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.BlockCategory.html" title="enum in org.apache.hadoop.hbase.io.hfile"><code>BlockType.BlockCategory</code></a> should be compressed in blockcache, false otherwise</dd>
@@ -1482,7 +1434,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldPrefetchOnOpen</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.462">shouldPrefetchOnOpen</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.436">shouldPrefetchOnOpen</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>true if blocks should be prefetched into the cache on open, false if not</dd>
@@ -1495,7 +1447,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldReadBlockFromCache</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.474">shouldReadBlockFromCache</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;blockType)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.448">shouldReadBlockFromCache</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;blockType)</pre>
 <div class="block">Return true if we may find this type of block in block cache.
  <p>
  TODO: today <code>family.isBlockCacheEnabled()</code> only means <code>cacheDataOnRead</code>, so here we
@@ -1510,7 +1462,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>shouldLockOnCacheMiss</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.501">shouldLockOnCacheMiss</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;blockType)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.475">shouldLockOnCacheMiss</a>(<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile">BlockType</a>&nbsp;blockType)</pre>
 <div class="block">If we make sure the block could not be cached, we will not acquire the lock
  otherwise we will acquire lock</div>
 </li>
@@ -1521,7 +1473,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>toString</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.509">toString</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.483">toString</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Overrides:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--" title="class or interface in java.lang">toString</a></code>&nbsp;in class&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></dd>
@@ -1534,7 +1486,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getOnHeapCache</h4>
-<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/LruBlockCache.html" title="class in org.apache.hadoop.hbase.io.hfile">LruBlockCache</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.543">getOnHeapCache</a>(org.apache.hadoop.conf.Configuration&nbsp;c)</pre>
+<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/LruBlockCache.html" title="class in org.apache.hadoop.hbase.io.hfile">LruBlockCache</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.517">getOnHeapCache</a>(org.apache.hadoop.conf.Configuration&nbsp;c)</pre>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
 <dd><code>c</code> - Configuration to use.</dd>
@@ -1549,7 +1501,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getOnHeapCacheStats</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheStats.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheStats</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.547">getOnHeapCacheStats</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheStats.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheStats</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.521">getOnHeapCacheStats</a>()</pre>
 </li>
 </ul>
 <a name="getL2CacheStats--">
@@ -1558,7 +1510,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getL2CacheStats</h4>
-<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheStats.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheStats</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.554">getL2CacheStats</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheStats.html" title="class in org.apache.hadoop.hbase.io.hfile">CacheStats</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.528">getL2CacheStats</a>()</pre>
 </li>
 </ul>
 <a name="getOnHeapCacheInternal-org.apache.hadoop.conf.Configuration-">
@@ -1567,7 +1519,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getOnHeapCacheInternal</h4>
-<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/LruBlockCache.html" title="class in org.apache.hadoop.hbase.io.hfile">LruBlockCache</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.565">getOnHeapCacheInternal</a>(org.apache.hadoop.conf.Configuration&nbsp;c)</pre>
+<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/LruBlockCache.html" title="class in org.apache.hadoop.hbase.io.hfile">LruBlockCache</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.539">getOnHeapCacheInternal</a>(org.apache.hadoop.conf.Configuration&nbsp;c)</pre>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
 <dd><code>c</code> - Configuration to use.</dd>
@@ -1582,7 +1534,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getExternalBlockcache</h4>
-<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.581">getExternalBlockcache</a>(org.apache.hadoop.conf.Configuration&nbsp;c)</pre>
+<pre>private static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.555">getExternalBlockcache</a>(org.apache.hadoop.conf.Configuration&nbsp;c)</pre>
 </li>
 </ul>
 <a name="getBucketCache-org.apache.hadoop.conf.Configuration-">
@@ -1591,7 +1543,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getBucketCache</h4>
-<pre>static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.611">getBucketCache</a>(org.apache.hadoop.conf.Configuration&nbsp;c)</pre>
+<pre>static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html" title="class in org.apache.hadoop.hbase.io.hfile.bucket">BucketCache</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.585">getBucketCache</a>(org.apache.hadoop.conf.Configuration&nbsp;c)</pre>
 </li>
 </ul>
 <a name="instantiateBlockCache-org.apache.hadoop.conf.Configuration-">
@@ -1600,7 +1552,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>instantiateBlockCache</h4>
-<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.671">instantiateBlockCache</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
+<pre>public static&nbsp;<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</a>&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.645">instantiateBlockCache</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 <div class="block">Returns the block cache or <code>null</code> in case none should be used.
  Sets GLOBAL_BLOCK_CACHE_INSTANCE</div>
 <dl>
@@ -1617,7 +1569,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>clearGlobalInstances</h4>
-<pre>static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.699">clearGlobalInstances</a>()</pre>
+<pre>static&nbsp;void&nbsp;<a href="../../../../../../src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.html#line.679">clearGlobalInstances</a>()</pre>
 </li>
 </ul>
 </li>
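
Note on the CacheConfig change shown above: this diff drops the CacheConfig(Configuration, boolean enableBlockCache) constructor and the private all-boolean constructor, and shifts the remaining members' source-line anchors. What follows is a minimal, hedged sketch (not code from this commit) of how a caller might use the entry points still listed on this page, namely instantiateBlockCache(Configuration) and CacheConfig(Configuration, ColumnFamilyDescriptor). The class name, the "cf" family name, and the use of ColumnFamilyDescriptorBuilder.of(...) are illustrative assumptions.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.io.hfile.BlockCache;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;

public class CacheConfigSketch {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();

    // Per the docs above, instantiateBlockCache() builds the process-wide
    // cache (GLOBAL_BLOCK_CACHE_INSTANCE) and may return null when block
    // caching is disabled entirely.
    BlockCache blockCache = CacheConfig.instantiateBlockCache(conf);

    // Family-scoped cache settings go through the remaining public
    // (Configuration, ColumnFamilyDescriptor) constructor; "cf" is a
    // made-up family name for illustration.
    ColumnFamilyDescriptor family = ColumnFamilyDescriptorBuilder.of("cf");
    CacheConfig cacheConf = new CacheConfig(conf, family);

    System.out.println("block cache enabled: " + cacheConf.isBlockCacheEnabled());
    System.out.println(cacheConf); // toString() summarizes the caching flags
    System.out.println("global cache present: " + (blockCache != null));
  }
}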

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
index 985a0b3..3534131 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
@@ -274,12 +274,12 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockPriority.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockPriority</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/HFileBlock.Writer.State.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">HFileBlock.Writer.State</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.MemoryType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">Cacheable.MemoryType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.BlockCategory.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockType.BlockCategory</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/Cacheable.MemoryType.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">Cacheable.MemoryType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">CacheConfig.ExternalBlockCaches</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.io.hfile.<a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockPriority.html" title="enum in org.apache.hadoop.hbase.io.hfile"><span class="typeNameLink">BlockPriority</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html b/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
index c12ae36..91a2ed4 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
@@ -353,9 +353,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceFactoryImpl.SourceStorage.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">MetricsHBaseServerSourceFactoryImpl.SourceStorage</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/CallEvent.Type.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">CallEvent.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/BufferCallBeforeInitHandler.BufferCallAction.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">BufferCallBeforeInitHandler.BufferCallAction</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.ipc.<a href="../../../../../org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceFactoryImpl.SourceStorage.html" title="enum in org.apache.hadoop.hbase.ipc"><span class="typeNameLink">MetricsHBaseServerSourceFactoryImpl.SourceStorage</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html b/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
index a1882cb..2061ab3 100644
--- a/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
@@ -294,8 +294,8 @@
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
 <li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/SyncTable.SyncMapper.Counter.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">SyncTable.SyncMapper.Counter</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/TableSplit.Version.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">TableSplit.Version</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/RowCounter.RowCounterMapper.Counters.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">RowCounter.RowCounterMapper.Counters</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/TableSplit.Version.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">TableSplit.Version</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.mapreduce.<a href="../../../../../org/apache/hadoop/hbase/mapreduce/CellCounter.CellCounterMapper.Counters.html" title="enum in org.apache.hadoop.hbase.mapreduce"><span class="typeNameLink">CellCounter.CellCounterMapper.Counters</span></a></li>
 </ul>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/org/apache/hadoop/hbase/master/balancer/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/master/balancer/package-tree.html b/devapidocs/org/apache/hadoop/hbase/master/balancer/package-tree.html
index abd92a9..1799704 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/balancer/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/balancer/package-tree.html
@@ -198,8 +198,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.master.balancer.<a href="../../../../../../org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.LocalityType.html" title="enum in org.apache.hadoop.hbase.master.balancer"><span class="typeNameLink">BaseLoadBalancer.Cluster.LocalityType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.balancer.<a href="../../../../../../org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.Action.Type.html" title="enum in org.apache.hadoop.hbase.master.balancer"><span class="typeNameLink">BaseLoadBalancer.Cluster.Action.Type</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.balancer.<a href="../../../../../../org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.LocalityType.html" title="enum in org.apache.hadoop.hbase.master.balancer"><span class="typeNameLink">BaseLoadBalancer.Cluster.LocalityType</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/master/package-tree.html b/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
index 14234ea..f21ad9b 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
@@ -347,11 +347,11 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">MasterRpcServices.BalanceSwitchMode</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MetricsMasterSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">MetricsMasterSourceFactoryImpl.FactoryStorage</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">SplitLogManager.ResubmitDirective</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/RegionState.State.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">RegionState.State</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.ResubmitDirective.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">SplitLogManager.ResubmitDirective</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/SplitLogManager.TerminationStatus.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">SplitLogManager.TerminationStatus</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.<a href="../../../../../org/apache/hadoop/hbase/master/MasterRpcServices.BalanceSwitchMode.html" title="enum in org.apache.hadoop.hbase.master"><span class="typeNameLink">MasterRpcServices.BalanceSwitchMode</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html b/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
index 1dd5d5a..f76d995 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
@@ -216,9 +216,9 @@
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
 <li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/TableProcedureInterface.TableOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">TableProcedureInterface.TableOperationType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/PeerProcedureInterface.PeerOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">PeerProcedureInterface.PeerOperationType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/ServerProcedureInterface.ServerOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">ServerProcedureInterface.ServerOperationType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/MetaProcedureInterface.MetaOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">MetaProcedureInterface.MetaOperationType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.master.procedure.<a href="../../../../../../org/apache/hadoop/hbase/master/procedure/PeerProcedureInterface.PeerOperationType.html" title="enum in org.apache.hadoop.hbase.master.procedure"><span class="typeNameLink">PeerProcedureInterface.PeerOperationType</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/org/apache/hadoop/hbase/mob/MobCacheConfig.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/mob/MobCacheConfig.html b/devapidocs/org/apache/hadoop/hbase/mob/MobCacheConfig.html
index 83b0eb9..7fec611 100644
--- a/devapidocs/org/apache/hadoop/hbase/mob/MobCacheConfig.html
+++ b/devapidocs/org/apache/hadoop/hbase/mob/MobCacheConfig.html
@@ -165,10 +165,6 @@ extends <a href="../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.htm
 <td class="colOne"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/mob/MobCacheConfig.html#MobCacheConfig-org.apache.hadoop.conf.Configuration-">MobCacheConfig</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf)</code>&nbsp;</td>
 </tr>
 <tr class="rowColor">
-<td class="colOne"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/mob/MobCacheConfig.html#MobCacheConfig-org.apache.hadoop.conf.Configuration-boolean-">MobCacheConfig</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf,
-              boolean&nbsp;needBlockCache)</code>&nbsp;</td>
-</tr>
-<tr class="altColor">
 <td class="colOne"><code><span class="memberNameLink"><a href="../../../../../org/apache/hadoop/hbase/mob/MobCacheConfig.html#MobCacheConfig-org.apache.hadoop.conf.Configuration-org.apache.hadoop.hbase.client.ColumnFamilyDescriptor-">MobCacheConfig</a></span>(org.apache.hadoop.conf.Configuration&nbsp;conf,
               <a href="../../../../../org/apache/hadoop/hbase/client/ColumnFamilyDescriptor.html" title="interface in org.apache.hadoop.hbase.client">ColumnFamilyDescriptor</a>&nbsp;family)</code>&nbsp;</td>
 </tr>
@@ -258,20 +254,10 @@ extends <a href="../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.htm
 <a name="MobCacheConfig-org.apache.hadoop.conf.Configuration-">
 <!--   -->
 </a>
-<ul class="blockList">
-<li class="blockList">
-<h4>MobCacheConfig</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mob/MobCacheConfig.html#line.40">MobCacheConfig</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
-</li>
-</ul>
-<a name="MobCacheConfig-org.apache.hadoop.conf.Configuration-boolean-">
-<!--   -->
-</a>
 <ul class="blockListLast">
 <li class="blockList">
 <h4>MobCacheConfig</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mob/MobCacheConfig.html#line.45">MobCacheConfig</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
-                      boolean&nbsp;needBlockCache)</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mob/MobCacheConfig.html#line.40">MobCacheConfig</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 </li>
 </ul>
 </li>
@@ -288,7 +274,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.htm
 <ul class="blockList">
 <li class="blockList">
 <h4>instantiateMobFileCache</h4>
-<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/mob/MobFileCache.html" title="class in org.apache.hadoop.hbase.mob">MobFileCache</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mob/MobCacheConfig.html#line.55">instantiateMobFileCache</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
+<pre>public static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/mob/MobFileCache.html" title="class in org.apache.hadoop.hbase.mob">MobFileCache</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mob/MobCacheConfig.html#line.50">instantiateMobFileCache</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 <div class="block">Instantiates the MobFileCache.</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -304,7 +290,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/io/hfile/CacheConfig.htm
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getMobFileCache</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/mob/MobFileCache.html" title="class in org.apache.hadoop.hbase.mob">MobFileCache</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mob/MobCacheConfig.html#line.66">getMobFileCache</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/mob/MobFileCache.html" title="class in org.apache.hadoop.hbase.mob">MobFileCache</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/mob/MobCacheConfig.html#line.61">getMobFileCache</a>()</pre>
 <div class="block">Gets the MobFileCache.</div>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
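
Note on the MobCacheConfig change shown above: the MobCacheConfig(Configuration, boolean needBlockCache) constructor is removed, leaving MobCacheConfig(Configuration) plus the static instantiateMobFileCache(Configuration) and the getMobFileCache() accessor. A minimal, hedged sketch of that surface follows; the class name and the call ordering are illustrative assumptions, not code from this commit.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.mob.MobCacheConfig;
import org.apache.hadoop.hbase.mob.MobFileCache;

public class MobCacheConfigSketch {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();

    // instantiateMobFileCache() sets up the shared MOB file cache; the docs
    // above list it as public static and returning the MobFileCache.
    MobFileCache cache = MobCacheConfig.instantiateMobFileCache(conf);

    // Configs are now built from the Configuration alone, since the
    // needBlockCache variant is gone.
    MobCacheConfig mobCacheConf = new MobCacheConfig(conf);

    // getMobFileCache() is expected to hand back the same shared instance.
    System.out.println("same MOB file cache: "
        + (cache == mobCacheConf.getMobFileCache()));
  }
}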

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/org/apache/hadoop/hbase/monitoring/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/monitoring/package-tree.html b/devapidocs/org/apache/hadoop/hbase/monitoring/package-tree.html
index 7b435d2..4b549da 100644
--- a/devapidocs/org/apache/hadoop/hbase/monitoring/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/monitoring/package-tree.html
@@ -127,8 +127,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.monitoring.<a href="../../../../../org/apache/hadoop/hbase/monitoring/MonitoredTask.State.html" title="enum in org.apache.hadoop.hbase.monitoring"><span class="typeNameLink">MonitoredTask.State</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.monitoring.<a href="../../../../../org/apache/hadoop/hbase/monitoring/TaskMonitor.TaskFilter.TaskType.html" title="enum in org.apache.hadoop.hbase.monitoring"><span class="typeNameLink">TaskMonitor.TaskFilter.TaskType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.monitoring.<a href="../../../../../org/apache/hadoop/hbase/monitoring/MonitoredTask.State.html" title="enum in org.apache.hadoop.hbase.monitoring"><span class="typeNameLink">MonitoredTask.State</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/org/apache/hadoop/hbase/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/package-tree.html b/devapidocs/org/apache/hadoop/hbase/package-tree.html
index 6168a3c..a4a3612 100644
--- a/devapidocs/org/apache/hadoop/hbase/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/package-tree.html
@@ -432,19 +432,19 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Coprocessor.State.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Coprocessor.State</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeepDeletedCells.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">KeepDeletedCells</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/MemoryCompactionPolicy.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">MemoryCompactionPolicy</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CompatibilitySingletonFactory.SingletonStorage.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CompatibilitySingletonFactory.SingletonStorage</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HConstants.OperationStatusCode.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HConstants.OperationStatusCode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HealthChecker.HealthCheckerExitStatus</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CellBuilderType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CellBuilderType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Size.Unit.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Size.Unit</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/ClusterMetrics.Option.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">ClusterMetrics.Option</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeyValue.Type.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">KeyValue.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CompareOperator.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CompareOperator</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HConstants.OperationStatusCode.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HConstants.OperationStatusCode</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/MemoryCompactionPolicy.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">MemoryCompactionPolicy</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CompatibilitySingletonFactory.SingletonStorage.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CompatibilitySingletonFactory.SingletonStorage</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeepDeletedCells.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">KeepDeletedCells</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Coprocessor.State.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Coprocessor.State</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/MetaTableAccessor.QueryType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">MetaTableAccessor.QueryType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/KeyValue.Type.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">KeyValue.Type</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/CellBuilderType.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">CellBuilderType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">HealthChecker.HealthCheckerExitStatus</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Cell.Type.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Cell.Type</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.<a href="../../../../org/apache/hadoop/hbase/Size.Unit.html" title="enum in org.apache.hadoop.hbase"><span class="typeNameLink">Size.Unit</span></a></li>
 </ul>
 </li>
 </ul>


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html
index 6369c27..ea05301 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html
@@ -603,3251 +603,3256 @@
 <span class="sourceLineNo">595</span>      // init superusers and add the server principal (if using security)<a name="line.595"></a>
 <span class="sourceLineNo">596</span>      // or process owner as default super user.<a name="line.596"></a>
 <span class="sourceLineNo">597</span>      Superusers.initialize(conf);<a name="line.597"></a>
-<span class="sourceLineNo">598</span><a name="line.598"></a>
-<span class="sourceLineNo">599</span>      regionServerAccounting = new RegionServerAccounting(conf);<a name="line.599"></a>
+<span class="sourceLineNo">598</span>      regionServerAccounting = new RegionServerAccounting(conf);<a name="line.598"></a>
+<span class="sourceLineNo">599</span><a name="line.599"></a>
 <span class="sourceLineNo">600</span>      boolean isMasterNotCarryTable =<a name="line.600"></a>
 <span class="sourceLineNo">601</span>          this instanceof HMaster &amp;&amp; !LoadBalancer.isTablesOnMaster(conf);<a name="line.601"></a>
-<span class="sourceLineNo">602</span>      cacheConfig = new CacheConfig(conf, !isMasterNotCarryTable);<a name="line.602"></a>
-<span class="sourceLineNo">603</span>      mobCacheConfig = new MobCacheConfig(conf, !isMasterNotCarryTable);<a name="line.603"></a>
-<span class="sourceLineNo">604</span>      uncaughtExceptionHandler = new UncaughtExceptionHandler() {<a name="line.604"></a>
-<span class="sourceLineNo">605</span>        @Override<a name="line.605"></a>
-<span class="sourceLineNo">606</span>        public void uncaughtException(Thread t, Throwable e) {<a name="line.606"></a>
-<span class="sourceLineNo">607</span>          abort("Uncaught exception in executorService thread " + t.getName(), e);<a name="line.607"></a>
-<span class="sourceLineNo">608</span>        }<a name="line.608"></a>
-<span class="sourceLineNo">609</span>      };<a name="line.609"></a>
-<span class="sourceLineNo">610</span><a name="line.610"></a>
-<span class="sourceLineNo">611</span>      initializeFileSystem();<a name="line.611"></a>
-<span class="sourceLineNo">612</span>      spanReceiverHost = SpanReceiverHost.getInstance(getConfiguration());<a name="line.612"></a>
-<span class="sourceLineNo">613</span><a name="line.613"></a>
-<span class="sourceLineNo">614</span>      this.configurationManager = new ConfigurationManager();<a name="line.614"></a>
-<span class="sourceLineNo">615</span>      setupWindows(getConfiguration(), getConfigurationManager());<a name="line.615"></a>
-<span class="sourceLineNo">616</span><a name="line.616"></a>
-<span class="sourceLineNo">617</span>      // Some unit tests don't need a cluster, so no zookeeper at all<a name="line.617"></a>
-<span class="sourceLineNo">618</span>      if (!conf.getBoolean("hbase.testing.nocluster", false)) {<a name="line.618"></a>
-<span class="sourceLineNo">619</span>        // Open connection to zookeeper and set primary watcher<a name="line.619"></a>
-<span class="sourceLineNo">620</span>        zooKeeper = new ZKWatcher(conf, getProcessName() + ":" +<a name="line.620"></a>
-<span class="sourceLineNo">621</span>          rpcServices.isa.getPort(), this, canCreateBaseZNode());<a name="line.621"></a>
-<span class="sourceLineNo">622</span>        // If no master in cluster, skip trying to track one or look for a cluster status.<a name="line.622"></a>
-<span class="sourceLineNo">623</span>        if (!this.masterless) {<a name="line.623"></a>
-<span class="sourceLineNo">624</span>          this.csm = new ZkCoordinatedStateManager(this);<a name="line.624"></a>
-<span class="sourceLineNo">625</span><a name="line.625"></a>
-<span class="sourceLineNo">626</span>          masterAddressTracker = new MasterAddressTracker(getZooKeeper(), this);<a name="line.626"></a>
-<span class="sourceLineNo">627</span>          masterAddressTracker.start();<a name="line.627"></a>
-<span class="sourceLineNo">628</span><a name="line.628"></a>
-<span class="sourceLineNo">629</span>          clusterStatusTracker = new ClusterStatusTracker(zooKeeper, this);<a name="line.629"></a>
-<span class="sourceLineNo">630</span>          clusterStatusTracker.start();<a name="line.630"></a>
-<span class="sourceLineNo">631</span>        } else {<a name="line.631"></a>
-<span class="sourceLineNo">632</span>          masterAddressTracker = null;<a name="line.632"></a>
-<span class="sourceLineNo">633</span>          clusterStatusTracker = null;<a name="line.633"></a>
-<span class="sourceLineNo">634</span>        }<a name="line.634"></a>
-<span class="sourceLineNo">635</span>      } else {<a name="line.635"></a>
-<span class="sourceLineNo">636</span>        zooKeeper = null;<a name="line.636"></a>
-<span class="sourceLineNo">637</span>        masterAddressTracker = null;<a name="line.637"></a>
-<span class="sourceLineNo">638</span>        clusterStatusTracker = null;<a name="line.638"></a>
-<span class="sourceLineNo">639</span>      }<a name="line.639"></a>
-<span class="sourceLineNo">640</span>      this.rpcServices.start(zooKeeper);<a name="line.640"></a>
-<span class="sourceLineNo">641</span>      // This violates 'no starting stuff in Constructor' but Master depends on the below chore<a name="line.641"></a>
-<span class="sourceLineNo">642</span>      // and executor being created and takes a different startup route. Lots of overlap between HRS<a name="line.642"></a>
-<span class="sourceLineNo">643</span>      // and M (An M IS A HRS now). Need to refactor so less duplication between M and its super<a name="line.643"></a>
-<span class="sourceLineNo">644</span>      // Master expects Constructor to put up web servers. Ugh.<a name="line.644"></a>
-<span class="sourceLineNo">645</span>      // class HRS. TODO.<a name="line.645"></a>
-<span class="sourceLineNo">646</span>      this.choreService = new ChoreService(getName(), true);<a name="line.646"></a>
-<span class="sourceLineNo">647</span>      this.executorService = new ExecutorService(getName());<a name="line.647"></a>
-<span class="sourceLineNo">648</span>      putUpWebUI();<a name="line.648"></a>
-<span class="sourceLineNo">649</span>    } catch (Throwable t) {<a name="line.649"></a>
-<span class="sourceLineNo">650</span>      // Make sure we log the exception. HRegionServer is often started via reflection and the<a name="line.650"></a>
-<span class="sourceLineNo">651</span>      // cause of failed startup is lost.<a name="line.651"></a>
-<span class="sourceLineNo">652</span>      LOG.error("Failed construction RegionServer", t);<a name="line.652"></a>
-<span class="sourceLineNo">653</span>      throw t;<a name="line.653"></a>
-<span class="sourceLineNo">654</span>    }<a name="line.654"></a>
-<span class="sourceLineNo">655</span>  }<a name="line.655"></a>
-<span class="sourceLineNo">656</span><a name="line.656"></a>
-<span class="sourceLineNo">657</span>  // HMaster should override this method to load the specific config for master<a name="line.657"></a>
-<span class="sourceLineNo">658</span>  protected String getUseThisHostnameInstead(Configuration conf) throws IOException {<a name="line.658"></a>
-<span class="sourceLineNo">659</span>    String hostname = conf.get(RS_HOSTNAME_KEY);<a name="line.659"></a>
-<span class="sourceLineNo">660</span>    if (conf.getBoolean(RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY, false)) {<a name="line.660"></a>
-<span class="sourceLineNo">661</span>      if (!StringUtils.isBlank(hostname)) {<a name="line.661"></a>
-<span class="sourceLineNo">662</span>        String msg = RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY + " and " + RS_HOSTNAME_KEY +<a name="line.662"></a>
-<span class="sourceLineNo">663</span>          " are mutually exclusive. Do not set " + RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY +<a name="line.663"></a>
-<span class="sourceLineNo">664</span>          " to true while " + RS_HOSTNAME_KEY + " is used";<a name="line.664"></a>
-<span class="sourceLineNo">665</span>        throw new IOException(msg);<a name="line.665"></a>
-<span class="sourceLineNo">666</span>      } else {<a name="line.666"></a>
-<span class="sourceLineNo">667</span>        return rpcServices.isa.getHostName();<a name="line.667"></a>
-<span class="sourceLineNo">668</span>      }<a name="line.668"></a>
-<span class="sourceLineNo">669</span>    } else {<a name="line.669"></a>
-<span class="sourceLineNo">670</span>      return hostname;<a name="line.670"></a>
-<span class="sourceLineNo">671</span>    }<a name="line.671"></a>
-<span class="sourceLineNo">672</span>  }<a name="line.672"></a>
-<span class="sourceLineNo">673</span><a name="line.673"></a>
-<span class="sourceLineNo">674</span>  /**<a name="line.674"></a>
-<span class="sourceLineNo">675</span>   * If running on Windows, do windows-specific setup.<a name="line.675"></a>
-<span class="sourceLineNo">676</span>   */<a name="line.676"></a>
-<span class="sourceLineNo">677</span>  private static void setupWindows(final Configuration conf, ConfigurationManager cm) {<a name="line.677"></a>
-<span class="sourceLineNo">678</span>    if (!SystemUtils.IS_OS_WINDOWS) {<a name="line.678"></a>
-<span class="sourceLineNo">679</span>      Signal.handle(new Signal("HUP"), new SignalHandler() {<a name="line.679"></a>
-<span class="sourceLineNo">680</span>        @Override<a name="line.680"></a>
-<span class="sourceLineNo">681</span>        public void handle(Signal signal) {<a name="line.681"></a>
-<span class="sourceLineNo">682</span>          conf.reloadConfiguration();<a name="line.682"></a>
-<span class="sourceLineNo">683</span>          cm.notifyAllObservers(conf);<a name="line.683"></a>
-<span class="sourceLineNo">684</span>        }<a name="line.684"></a>
-<span class="sourceLineNo">685</span>      });<a name="line.685"></a>
-<span class="sourceLineNo">686</span>    }<a name="line.686"></a>
-<span class="sourceLineNo">687</span>  }<a name="line.687"></a>
-<span class="sourceLineNo">688</span><a name="line.688"></a>
-<span class="sourceLineNo">689</span>  private static NettyEventLoopGroupConfig setupNetty(Configuration conf) {<a name="line.689"></a>
-<span class="sourceLineNo">690</span>    // Initialize netty event loop group at start as we may use it for rpc server, rpc client &amp; WAL.<a name="line.690"></a>
-<span class="sourceLineNo">691</span>    NettyEventLoopGroupConfig nelgc =<a name="line.691"></a>
-<span class="sourceLineNo">692</span>      new NettyEventLoopGroupConfig(conf, "RS-EventLoopGroup");<a name="line.692"></a>
-<span class="sourceLineNo">693</span>    NettyRpcClientConfigHelper.setEventLoopConfig(conf, nelgc.group(), nelgc.clientChannelClass());<a name="line.693"></a>
-<span class="sourceLineNo">694</span>    NettyAsyncFSWALConfigHelper.setEventLoopConfig(conf, nelgc.group(), nelgc.clientChannelClass());<a name="line.694"></a>
-<span class="sourceLineNo">695</span>    return nelgc;<a name="line.695"></a>
-<span class="sourceLineNo">696</span>  }<a name="line.696"></a>
-<span class="sourceLineNo">697</span><a name="line.697"></a>
-<span class="sourceLineNo">698</span>  private void initializeFileSystem() throws IOException {<a name="line.698"></a>
-<span class="sourceLineNo">699</span>    // Get fs instance used by this RS.  Do we use checksum verification in the hbase? If hbase<a name="line.699"></a>
-<span class="sourceLineNo">700</span>    // checksum verification enabled, then automatically switch off hdfs checksum verification.<a name="line.700"></a>
-<span class="sourceLineNo">701</span>    boolean useHBaseChecksum = conf.getBoolean(HConstants.HBASE_CHECKSUM_VERIFICATION, true);<a name="line.701"></a>
-<span class="sourceLineNo">702</span>    FSUtils.setFsDefault(this.conf, FSUtils.getWALRootDir(this.conf));<a name="line.702"></a>
-<span class="sourceLineNo">703</span>    this.walFs = new HFileSystem(this.conf, useHBaseChecksum);<a name="line.703"></a>
-<span class="sourceLineNo">704</span>    this.walRootDir = FSUtils.getWALRootDir(this.conf);<a name="line.704"></a>
-<span class="sourceLineNo">705</span>    // Set 'fs.defaultFS' to match the filesystem on hbase.rootdir else<a name="line.705"></a>
-<span class="sourceLineNo">706</span>    // underlying hadoop hdfs accessors will be going against wrong filesystem<a name="line.706"></a>
-<span class="sourceLineNo">707</span>    // (unless all is set to defaults).<a name="line.707"></a>
-<span class="sourceLineNo">708</span>    FSUtils.setFsDefault(this.conf, FSUtils.getRootDir(this.conf));<a name="line.708"></a>
-<span class="sourceLineNo">709</span>    this.fs = new HFileSystem(this.conf, useHBaseChecksum);<a name="line.709"></a>
-<span class="sourceLineNo">710</span>    this.rootDir = FSUtils.getRootDir(this.conf);<a name="line.710"></a>
-<span class="sourceLineNo">711</span>    this.tableDescriptors = getFsTableDescriptors();<a name="line.711"></a>
-<span class="sourceLineNo">712</span>  }<a name="line.712"></a>
-<span class="sourceLineNo">713</span><a name="line.713"></a>
-<span class="sourceLineNo">714</span>  protected TableDescriptors getFsTableDescriptors() throws IOException {<a name="line.714"></a>
-<span class="sourceLineNo">715</span>    return new FSTableDescriptors(this.conf,<a name="line.715"></a>
-<span class="sourceLineNo">716</span>      this.fs, this.rootDir, !canUpdateTableDescriptor(), false, getMetaTableObserver());<a name="line.716"></a>
+<span class="sourceLineNo">602</span>      // no need to instantiate global block cache when master not carry table<a name="line.602"></a>
+<span class="sourceLineNo">603</span>      if (!isMasterNotCarryTable) {<a name="line.603"></a>
+<span class="sourceLineNo">604</span>        CacheConfig.instantiateBlockCache(conf);<a name="line.604"></a>
+<span class="sourceLineNo">605</span>      }<a name="line.605"></a>
+<span class="sourceLineNo">606</span>      cacheConfig = new CacheConfig(conf);<a name="line.606"></a>
+<span class="sourceLineNo">607</span>      mobCacheConfig = new MobCacheConfig(conf);<a name="line.607"></a>
+<span class="sourceLineNo">608</span><a name="line.608"></a>
+<span class="sourceLineNo">609</span>      uncaughtExceptionHandler = new UncaughtExceptionHandler() {<a name="line.609"></a>
+<span class="sourceLineNo">610</span>        @Override<a name="line.610"></a>
+<span class="sourceLineNo">611</span>        public void uncaughtException(Thread t, Throwable e) {<a name="line.611"></a>
+<span class="sourceLineNo">612</span>          abort("Uncaught exception in executorService thread " + t.getName(), e);<a name="line.612"></a>
+<span class="sourceLineNo">613</span>        }<a name="line.613"></a>
+<span class="sourceLineNo">614</span>      };<a name="line.614"></a>
+<span class="sourceLineNo">615</span><a name="line.615"></a>
+<span class="sourceLineNo">616</span>      initializeFileSystem();<a name="line.616"></a>
+<span class="sourceLineNo">617</span>      spanReceiverHost = SpanReceiverHost.getInstance(getConfiguration());<a name="line.617"></a>
+<span class="sourceLineNo">618</span><a name="line.618"></a>
+<span class="sourceLineNo">619</span>      this.configurationManager = new ConfigurationManager();<a name="line.619"></a>
+<span class="sourceLineNo">620</span>      setupWindows(getConfiguration(), getConfigurationManager());<a name="line.620"></a>
+<span class="sourceLineNo">621</span><a name="line.621"></a>
+<span class="sourceLineNo">622</span>      // Some unit tests don't need a cluster, so no zookeeper at all<a name="line.622"></a>
+<span class="sourceLineNo">623</span>      if (!conf.getBoolean("hbase.testing.nocluster", false)) {<a name="line.623"></a>
+<span class="sourceLineNo">624</span>        // Open connection to zookeeper and set primary watcher<a name="line.624"></a>
+<span class="sourceLineNo">625</span>        zooKeeper = new ZKWatcher(conf, getProcessName() + ":" +<a name="line.625"></a>
+<span class="sourceLineNo">626</span>          rpcServices.isa.getPort(), this, canCreateBaseZNode());<a name="line.626"></a>
+<span class="sourceLineNo">627</span>        // If no master in cluster, skip trying to track one or look for a cluster status.<a name="line.627"></a>
+<span class="sourceLineNo">628</span>        if (!this.masterless) {<a name="line.628"></a>
+<span class="sourceLineNo">629</span>          this.csm = new ZkCoordinatedStateManager(this);<a name="line.629"></a>
+<span class="sourceLineNo">630</span><a name="line.630"></a>
+<span class="sourceLineNo">631</span>          masterAddressTracker = new MasterAddressTracker(getZooKeeper(), this);<a name="line.631"></a>
+<span class="sourceLineNo">632</span>          masterAddressTracker.start();<a name="line.632"></a>
+<span class="sourceLineNo">633</span><a name="line.633"></a>
+<span class="sourceLineNo">634</span>          clusterStatusTracker = new ClusterStatusTracker(zooKeeper, this);<a name="line.634"></a>
+<span class="sourceLineNo">635</span>          clusterStatusTracker.start();<a name="line.635"></a>
+<span class="sourceLineNo">636</span>        } else {<a name="line.636"></a>
+<span class="sourceLineNo">637</span>          masterAddressTracker = null;<a name="line.637"></a>
+<span class="sourceLineNo">638</span>          clusterStatusTracker = null;<a name="line.638"></a>
+<span class="sourceLineNo">639</span>        }<a name="line.639"></a>
+<span class="sourceLineNo">640</span>      } else {<a name="line.640"></a>
+<span class="sourceLineNo">641</span>        zooKeeper = null;<a name="line.641"></a>
+<span class="sourceLineNo">642</span>        masterAddressTracker = null;<a name="line.642"></a>
+<span class="sourceLineNo">643</span>        clusterStatusTracker = null;<a name="line.643"></a>
+<span class="sourceLineNo">644</span>      }<a name="line.644"></a>
+<span class="sourceLineNo">645</span>      this.rpcServices.start(zooKeeper);<a name="line.645"></a>
+<span class="sourceLineNo">646</span>      // This violates 'no starting stuff in Constructor' but Master depends on the below chore<a name="line.646"></a>
+<span class="sourceLineNo">647</span>      // and executor being created and takes a different startup route. Lots of overlap between HRS<a name="line.647"></a>
+<span class="sourceLineNo">648</span>      // and M (An M IS A HRS now). Need to refactor so less duplication between M and its super<a name="line.648"></a>
+<span class="sourceLineNo">649</span>      // Master expects Constructor to put up web servers. Ugh.<a name="line.649"></a>
+<span class="sourceLineNo">650</span>      // class HRS. TODO.<a name="line.650"></a>
+<span class="sourceLineNo">651</span>      this.choreService = new ChoreService(getName(), true);<a name="line.651"></a>
+<span class="sourceLineNo">652</span>      this.executorService = new ExecutorService(getName());<a name="line.652"></a>
+<span class="sourceLineNo">653</span>      putUpWebUI();<a name="line.653"></a>
+<span class="sourceLineNo">654</span>    } catch (Throwable t) {<a name="line.654"></a>
+<span class="sourceLineNo">655</span>      // Make sure we log the exception. HRegionServer is often started via reflection and the<a name="line.655"></a>
+<span class="sourceLineNo">656</span>      // cause of failed startup is lost.<a name="line.656"></a>
+<span class="sourceLineNo">657</span>      LOG.error("Failed construction RegionServer", t);<a name="line.657"></a>
+<span class="sourceLineNo">658</span>      throw t;<a name="line.658"></a>
+<span class="sourceLineNo">659</span>    }<a name="line.659"></a>
+<span class="sourceLineNo">660</span>  }<a name="line.660"></a>
+<span class="sourceLineNo">661</span><a name="line.661"></a>
+<span class="sourceLineNo">662</span>  // HMaster should override this method to load the specific config for master<a name="line.662"></a>
+<span class="sourceLineNo">663</span>  protected String getUseThisHostnameInstead(Configuration conf) throws IOException {<a name="line.663"></a>
+<span class="sourceLineNo">664</span>    String hostname = conf.get(RS_HOSTNAME_KEY);<a name="line.664"></a>
+<span class="sourceLineNo">665</span>    if (conf.getBoolean(RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY, false)) {<a name="line.665"></a>
+<span class="sourceLineNo">666</span>      if (!StringUtils.isBlank(hostname)) {<a name="line.666"></a>
+<span class="sourceLineNo">667</span>        String msg = RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY + " and " + RS_HOSTNAME_KEY +<a name="line.667"></a>
+<span class="sourceLineNo">668</span>          " are mutually exclusive. Do not set " + RS_HOSTNAME_DISABLE_MASTER_REVERSEDNS_KEY +<a name="line.668"></a>
+<span class="sourceLineNo">669</span>          " to true while " + RS_HOSTNAME_KEY + " is used";<a name="line.669"></a>
+<span class="sourceLineNo">670</span>        throw new IOException(msg);<a name="line.670"></a>
+<span class="sourceLineNo">671</span>      } else {<a name="line.671"></a>
+<span class="sourceLineNo">672</span>        return rpcServices.isa.getHostName();<a name="line.672"></a>
+<span class="sourceLineNo">673</span>      }<a name="line.673"></a>
+<span class="sourceLineNo">674</span>    } else {<a name="line.674"></a>
+<span class="sourceLineNo">675</span>      return hostname;<a name="line.675"></a>
+<span class="sourceLineNo">676</span>    }<a name="line.676"></a>
+<span class="sourceLineNo">677</span>  }<a name="line.677"></a>
+<span class="sourceLineNo">678</span><a name="line.678"></a>
+<span class="sourceLineNo">679</span>  /**<a name="line.679"></a>
+<span class="sourceLineNo">680</span>   * If running on Windows, do windows-specific setup.<a name="line.680"></a>
+<span class="sourceLineNo">681</span>   */<a name="line.681"></a>
+<span class="sourceLineNo">682</span>  private static void setupWindows(final Configuration conf, ConfigurationManager cm) {<a name="line.682"></a>
+<span class="sourceLineNo">683</span>    if (!SystemUtils.IS_OS_WINDOWS) {<a name="line.683"></a>
+<span class="sourceLineNo">684</span>      Signal.handle(new Signal("HUP"), new SignalHandler() {<a name="line.684"></a>
+<span class="sourceLineNo">685</span>        @Override<a name="line.685"></a>
+<span class="sourceLineNo">686</span>        public void handle(Signal signal) {<a name="line.686"></a>
+<span class="sourceLineNo">687</span>          conf.reloadConfiguration();<a name="line.687"></a>
+<span class="sourceLineNo">688</span>          cm.notifyAllObservers(conf);<a name="line.688"></a>
+<span class="sourceLineNo">689</span>        }<a name="line.689"></a>
+<span class="sourceLineNo">690</span>      });<a name="line.690"></a>
+<span class="sourceLineNo">691</span>    }<a name="line.691"></a>
+<span class="sourceLineNo">692</span>  }<a name="line.692"></a>
+<span class="sourceLineNo">693</span><a name="line.693"></a>
+<span class="sourceLineNo">694</span>  private static NettyEventLoopGroupConfig setupNetty(Configuration conf) {<a name="line.694"></a>
+<span class="sourceLineNo">695</span>    // Initialize netty event loop group at start as we may use it for rpc server, rpc client &amp; WAL.<a name="line.695"></a>
+<span class="sourceLineNo">696</span>    NettyEventLoopGroupConfig nelgc =<a name="line.696"></a>
+<span class="sourceLineNo">697</span>      new NettyEventLoopGroupConfig(conf, "RS-EventLoopGroup");<a name="line.697"></a>
+<span class="sourceLineNo">698</span>    NettyRpcClientConfigHelper.setEventLoopConfig(conf, nelgc.group(), nelgc.clientChannelClass());<a name="line.698"></a>
+<span class="sourceLineNo">699</span>    NettyAsyncFSWALConfigHelper.setEventLoopConfig(conf, nelgc.group(), nelgc.clientChannelClass());<a name="line.699"></a>
+<span class="sourceLineNo">700</span>    return nelgc;<a name="line.700"></a>
+<span class="sourceLineNo">701</span>  }<a name="line.701"></a>
+<span class="sourceLineNo">702</span><a name="line.702"></a>
+<span class="sourceLineNo">703</span>  private void initializeFileSystem() throws IOException {<a name="line.703"></a>
+<span class="sourceLineNo">704</span>    // Get fs instance used by this RS.  Do we use checksum verification in the hbase? If hbase<a name="line.704"></a>
+<span class="sourceLineNo">705</span>    // checksum verification enabled, then automatically switch off hdfs checksum verification.<a name="line.705"></a>
+<span class="sourceLineNo">706</span>    boolean useHBaseChecksum = conf.getBoolean(HConstants.HBASE_CHECKSUM_VERIFICATION, true);<a name="line.706"></a>
+<span class="sourceLineNo">707</span>    FSUtils.setFsDefault(this.conf, FSUtils.getWALRootDir(this.conf));<a name="line.707"></a>
+<span class="sourceLineNo">708</span>    this.walFs = new HFileSystem(this.conf, useHBaseChecksum);<a name="line.708"></a>
+<span class="sourceLineNo">709</span>    this.walRootDir = FSUtils.getWALRootDir(this.conf);<a name="line.709"></a>
+<span class="sourceLineNo">710</span>    // Set 'fs.defaultFS' to match the filesystem on hbase.rootdir else<a name="line.710"></a>
+<span class="sourceLineNo">711</span>    // underlying hadoop hdfs accessors will be going against wrong filesystem<a name="line.711"></a>
+<span class="sourceLineNo">712</span>    // (unless all is set to defaults).<a name="line.712"></a>
+<span class="sourceLineNo">713</span>    FSUtils.setFsDefault(this.conf, FSUtils.getRootDir(this.conf));<a name="line.713"></a>
+<span class="sourceLineNo">714</span>    this.fs = new HFileSystem(this.conf, useHBaseChecksum);<a name="line.714"></a>
+<span class="sourceLineNo">715</span>    this.rootDir = FSUtils.getRootDir(this.conf);<a name="line.715"></a>
+<span class="sourceLineNo">716</span>    this.tableDescriptors = getFsTableDescriptors();<a name="line.716"></a>
 <span class="sourceLineNo">717</span>  }<a name="line.717"></a>
 <span class="sourceLineNo">718</span><a name="line.718"></a>
-<span class="sourceLineNo">719</span>  protected Function&lt;TableDescriptorBuilder, TableDescriptorBuilder&gt; getMetaTableObserver() {<a name="line.719"></a>
-<span class="sourceLineNo">720</span>    return null;<a name="line.720"></a>
-<span class="sourceLineNo">721</span>  }<a name="line.721"></a>
-<span class="sourceLineNo">722</span><a name="line.722"></a>
-<span class="sourceLineNo">723</span>  protected void login(UserProvider user, String host) throws IOException {<a name="line.723"></a>
-<span class="sourceLineNo">724</span>    user.login("hbase.regionserver.keytab.file",<a name="line.724"></a>
-<span class="sourceLineNo">725</span>      "hbase.regionserver.kerberos.principal", host);<a name="line.725"></a>
+<span class="sourceLineNo">719</span>  protected TableDescriptors getFsTableDescriptors() throws IOException {<a name="line.719"></a>
+<span class="sourceLineNo">720</span>    return new FSTableDescriptors(this.conf,<a name="line.720"></a>
+<span class="sourceLineNo">721</span>      this.fs, this.rootDir, !canUpdateTableDescriptor(), false, getMetaTableObserver());<a name="line.721"></a>
+<span class="sourceLineNo">722</span>  }<a name="line.722"></a>
+<span class="sourceLineNo">723</span><a name="line.723"></a>
+<span class="sourceLineNo">724</span>  protected Function&lt;TableDescriptorBuilder, TableDescriptorBuilder&gt; getMetaTableObserver() {<a name="line.724"></a>
+<span class="sourceLineNo">725</span>    return null;<a name="line.725"></a>
 <span class="sourceLineNo">726</span>  }<a name="line.726"></a>
 <span class="sourceLineNo">727</span><a name="line.727"></a>
-<span class="sourceLineNo">728</span><a name="line.728"></a>
-<span class="sourceLineNo">729</span>  /**<a name="line.729"></a>
-<span class="sourceLineNo">730</span>   * Wait for an active Master.<a name="line.730"></a>
-<span class="sourceLineNo">731</span>   * See override in Master superclass for how it is used.<a name="line.731"></a>
-<span class="sourceLineNo">732</span>   */<a name="line.732"></a>
-<span class="sourceLineNo">733</span>  protected void waitForMasterActive() {}<a name="line.733"></a>
-<span class="sourceLineNo">734</span><a name="line.734"></a>
-<span class="sourceLineNo">735</span>  protected String getProcessName() {<a name="line.735"></a>
-<span class="sourceLineNo">736</span>    return REGIONSERVER;<a name="line.736"></a>
-<span class="sourceLineNo">737</span>  }<a name="line.737"></a>
-<span class="sourceLineNo">738</span><a name="line.738"></a>
-<span class="sourceLineNo">739</span>  protected boolean canCreateBaseZNode() {<a name="line.739"></a>
-<span class="sourceLineNo">740</span>    return this.masterless;<a name="line.740"></a>
-<span class="sourceLineNo">741</span>  }<a name="line.741"></a>
-<span class="sourceLineNo">742</span><a name="line.742"></a>
-<span class="sourceLineNo">743</span>  protected boolean canUpdateTableDescriptor() {<a name="line.743"></a>
-<span class="sourceLineNo">744</span>    return false;<a name="line.744"></a>
-<span class="sourceLineNo">745</span>  }<a name="line.745"></a>
-<span class="sourceLineNo">746</span><a name="line.746"></a>
-<span class="sourceLineNo">747</span>  protected RSRpcServices createRpcServices() throws IOException {<a name="line.747"></a>
-<span class="sourceLineNo">748</span>    return new RSRpcServices(this);<a name="line.748"></a>
-<span class="sourceLineNo">749</span>  }<a name="line.749"></a>
-<span class="sourceLineNo">750</span><a name="line.750"></a>
-<span class="sourceLineNo">751</span>  protected void configureInfoServer() {<a name="line.751"></a>
-<span class="sourceLineNo">752</span>    infoServer.addServlet("rs-status", "/rs-status", RSStatusServlet.class);<a name="line.752"></a>
-<span class="sourceLineNo">753</span>    infoServer.setAttribute(REGIONSERVER, this);<a name="line.753"></a>
+<span class="sourceLineNo">728</span>  protected void login(UserProvider user, String host) throws IOException {<a name="line.728"></a>
+<span class="sourceLineNo">729</span>    user.login("hbase.regionserver.keytab.file",<a name="line.729"></a>
+<span class="sourceLineNo">730</span>      "hbase.regionserver.kerberos.principal", host);<a name="line.730"></a>
+<span class="sourceLineNo">731</span>  }<a name="line.731"></a>
+<span class="sourceLineNo">732</span><a name="line.732"></a>
+<span class="sourceLineNo">733</span><a name="line.733"></a>
+<span class="sourceLineNo">734</span>  /**<a name="line.734"></a>
+<span class="sourceLineNo">735</span>   * Wait for an active Master.<a name="line.735"></a>
+<span class="sourceLineNo">736</span>   * See override in Master superclass for how it is used.<a name="line.736"></a>
+<span class="sourceLineNo">737</span>   */<a name="line.737"></a>
+<span class="sourceLineNo">738</span>  protected void waitForMasterActive() {}<a name="line.738"></a>
+<span class="sourceLineNo">739</span><a name="line.739"></a>
+<span class="sourceLineNo">740</span>  protected String getProcessName() {<a name="line.740"></a>
+<span class="sourceLineNo">741</span>    return REGIONSERVER;<a name="line.741"></a>
+<span class="sourceLineNo">742</span>  }<a name="line.742"></a>
+<span class="sourceLineNo">743</span><a name="line.743"></a>
+<span class="sourceLineNo">744</span>  protected boolean canCreateBaseZNode() {<a name="line.744"></a>
+<span class="sourceLineNo">745</span>    return this.masterless;<a name="line.745"></a>
+<span class="sourceLineNo">746</span>  }<a name="line.746"></a>
+<span class="sourceLineNo">747</span><a name="line.747"></a>
+<span class="sourceLineNo">748</span>  protected boolean canUpdateTableDescriptor() {<a name="line.748"></a>
+<span class="sourceLineNo">749</span>    return false;<a name="line.749"></a>
+<span class="sourceLineNo">750</span>  }<a name="line.750"></a>
+<span class="sourceLineNo">751</span><a name="line.751"></a>
+<span class="sourceLineNo">752</span>  protected RSRpcServices createRpcServices() throws IOException {<a name="line.752"></a>
+<span class="sourceLineNo">753</span>    return new RSRpcServices(this);<a name="line.753"></a>
 <span class="sourceLineNo">754</span>  }<a name="line.754"></a>
 <span class="sourceLineNo">755</span><a name="line.755"></a>
-<span class="sourceLineNo">756</span>  protected Class&lt;? extends HttpServlet&gt; getDumpServlet() {<a name="line.756"></a>
-<span class="sourceLineNo">757</span>    return RSDumpServlet.class;<a name="line.757"></a>
-<span class="sourceLineNo">758</span>  }<a name="line.758"></a>
-<span class="sourceLineNo">759</span><a name="line.759"></a>
-<span class="sourceLineNo">760</span>  @Override<a name="line.760"></a>
-<span class="sourceLineNo">761</span>  public boolean registerService(com.google.protobuf.Service instance) {<a name="line.761"></a>
-<span class="sourceLineNo">762</span>    /*<a name="line.762"></a>
-<span class="sourceLineNo">763</span>     * No stacking of instances is allowed for a single executorService name<a name="line.763"></a>
-<span class="sourceLineNo">764</span>     */<a name="line.764"></a>
-<span class="sourceLineNo">765</span>    com.google.protobuf.Descriptors.ServiceDescriptor serviceDesc =<a name="line.765"></a>
-<span class="sourceLineNo">766</span>        instance.getDescriptorForType();<a name="line.766"></a>
-<span class="sourceLineNo">767</span>    String serviceName = CoprocessorRpcUtils.getServiceName(serviceDesc);<a name="line.767"></a>
-<span class="sourceLineNo">768</span>    if (coprocessorServiceHandlers.containsKey(serviceName)) {<a name="line.768"></a>
-<span class="sourceLineNo">769</span>      LOG.error("Coprocessor executorService " + serviceName<a name="line.769"></a>
-<span class="sourceLineNo">770</span>          + " already registered, rejecting request from " + instance);<a name="line.770"></a>
-<span class="sourceLineNo">771</span>      return false;<a name="line.771"></a>
-<span class="sourceLineNo">772</span>    }<a name="line.772"></a>
-<span class="sourceLineNo">773</span><a name="line.773"></a>
-<span class="sourceLineNo">774</span>    coprocessorServiceHandlers.put(serviceName, instance);<a name="line.774"></a>
-<span class="sourceLineNo">775</span>    if (LOG.isDebugEnabled()) {<a name="line.775"></a>
-<span class="sourceLineNo">776</span>      LOG.debug("Registered regionserver coprocessor executorService: executorService=" + serviceName);<a name="line.776"></a>
+<span class="sourceLineNo">756</span>  protected void configureInfoServer() {<a name="line.756"></a>
+<span class="sourceLineNo">757</span>    infoServer.addServlet("rs-status", "/rs-status", RSStatusServlet.class);<a name="line.757"></a>
+<span class="sourceLineNo">758</span>    infoServer.setAttribute(REGIONSERVER, this);<a name="line.758"></a>
+<span class="sourceLineNo">759</span>  }<a name="line.759"></a>
+<span class="sourceLineNo">760</span><a name="line.760"></a>
+<span class="sourceLineNo">761</span>  protected Class&lt;? extends HttpServlet&gt; getDumpServlet() {<a name="line.761"></a>
+<span class="sourceLineNo">762</span>    return RSDumpServlet.class;<a name="line.762"></a>
+<span class="sourceLineNo">763</span>  }<a name="line.763"></a>
+<span class="sourceLineNo">764</span><a name="line.764"></a>
+<span class="sourceLineNo">765</span>  @Override<a name="line.765"></a>
+<span class="sourceLineNo">766</span>  public boolean registerService(com.google.protobuf.Service instance) {<a name="line.766"></a>
+<span class="sourceLineNo">767</span>    /*<a name="line.767"></a>
+<span class="sourceLineNo">768</span>     * No stacking of instances is allowed for a single executorService name<a name="line.768"></a>
+<span class="sourceLineNo">769</span>     */<a name="line.769"></a>
+<span class="sourceLineNo">770</span>    com.google.protobuf.Descriptors.ServiceDescriptor serviceDesc =<a name="line.770"></a>
+<span class="sourceLineNo">771</span>        instance.getDescriptorForType();<a name="line.771"></a>
+<span class="sourceLineNo">772</span>    String serviceName = CoprocessorRpcUtils.getServiceName(serviceDesc);<a name="line.772"></a>
+<span class="sourceLineNo">773</span>    if (coprocessorServiceHandlers.containsKey(serviceName)) {<a name="line.773"></a>
+<span class="sourceLineNo">774</span>      LOG.error("Coprocessor executorService " + serviceName<a name="line.774"></a>
+<span class="sourceLineNo">775</span>          + " already registered, rejecting request from " + instance);<a name="line.775"></a>
+<span class="sourceLineNo">776</span>      return false;<a name="line.776"></a>
 <span class="sourceLineNo">777</span>    }<a name="line.777"></a>
-<span class="sourceLineNo">778</span>    return true;<a name="line.778"></a>
-<span class="sourceLineNo">779</span>  }<a name="line.779"></a>
-<span class="sourceLineNo">780</span><a name="line.780"></a>
-<span class="sourceLineNo">781</span>  /**<a name="line.781"></a>
-<span class="sourceLineNo">782</span>   * Create a 'smarter' Connection, one that is capable of by-passing RPC if the request is to the<a name="line.782"></a>
-<span class="sourceLineNo">783</span>   * local server; i.e. a short-circuit Connection. Safe to use going to local or remote server.<a name="line.783"></a>
-<span class="sourceLineNo">784</span>   */<a name="line.784"></a>
-<span class="sourceLineNo">785</span>  private ClusterConnection createClusterConnection() throws IOException {<a name="line.785"></a>
-<span class="sourceLineNo">786</span>    Configuration conf = this.conf;<a name="line.786"></a>
-<span class="sourceLineNo">787</span>    if (conf.get(HConstants.CLIENT_ZOOKEEPER_QUORUM) != null) {<a name="line.787"></a>
-<span class="sourceLineNo">788</span>      // Use server ZK cluster for server-issued connections, so we clone<a name="line.788"></a>
-<span class="sourceLineNo">789</span>      // the conf and unset the client ZK related properties<a name="line.789"></a>
-<span class="sourceLineNo">790</span>      conf = new Configuration(this.conf);<a name="line.790"></a>
-<span class="sourceLineNo">791</span>      conf.unset(HConstants.CLIENT_ZOOKEEPER_QUORUM);<a name="line.791"></a>
-<span class="sourceLineNo">792</span>    }<a name="line.792"></a>
-<span class="sourceLineNo">793</span>    // Create a cluster connection that when appropriate, can short-circuit and go directly to the<a name="line.793"></a>
-<span class="sourceLineNo">794</span>    // local server if the request is to the local server bypassing RPC. Can be used for both local<a name="line.794"></a>
-<span class="sourceLineNo">795</span>    // and remote invocations.<a name="line.795"></a>
-<span class="sourceLineNo">796</span>    ClusterConnection conn = ConnectionUtils.createShortCircuitConnection(conf, null,<a name="line.796"></a>
-<span class="sourceLineNo">797</span>      userProvider.getCurrent(), serverName, rpcServices, rpcServices);<a name="line.797"></a>
-<span class="sourceLineNo">798</span>    // This is used to initialize the batch thread pool inside the connection implementation.<a name="line.798"></a>
-<span class="sourceLineNo">799</span>    // When deploy a fresh cluster, we may first use the cluster connection in InitMetaProcedure,<a name="line.799"></a>
-<span class="sourceLineNo">800</span>    // which will be executed inside the PEWorker, and then the batch thread pool will inherit the<a name="line.800"></a>
-<span class="sourceLineNo">801</span>    // thread group of PEWorker, which will be destroy when shutting down the ProcedureExecutor. It<a name="line.801"></a>
-<span class="sourceLineNo">802</span>    // will cause lots of procedure related UTs to fail, so here let's initialize it first, no harm.<a name="line.802"></a>
-<span class="sourceLineNo">803</span>    conn.getTable(TableName.META_TABLE_NAME).close();<a name="line.803"></a>
-<span class="sourceLineNo">804</span>    return conn;<a name="line.804"></a>
-<span class="sourceLineNo">805</span>  }<a name="line.805"></a>
-<span class="sourceLineNo">806</span><a name="line.806"></a>
-<span class="sourceLineNo">807</span>  /**<a name="line.807"></a>
-<span class="sourceLineNo">808</span>   * Run test on configured codecs to make sure supporting libs are in place.<a name="line.808"></a>
-<span class="sourceLineNo">809</span>   * @param c<a name="line.809"></a>
-<span class="sourceLineNo">810</span>   * @throws IOException<a name="line.810"></a>
-<span class="sourceLineNo">811</span>   */<a name="line.811"></a>
-<span class="sourceLineNo">812</span>  private static void checkCodecs(final Configuration c) throws IOException {<a name="line.812"></a>
-<span class="sourceLineNo">813</span>    // check to see if the codec list is available:<a name="line.813"></a>
-<span class="sourceLineNo">814</span>    String [] codecs = c.getStrings("hbase.regionserver.codecs", (String[])null);<a name="line.814"></a>
-<span class="sourceLineNo">815</span>    if (codecs == null) return;<a name="line.815"></a>
-<span class="sourceLineNo">816</span>    for (String codec : codecs) {<a name="line.816"></a>
-<span class="sourceLineNo">817</span>      if (!CompressionTest.testCompression(codec)) {<a name="line.817"></a>
-<span class="sourceLineNo">818</span>        throw new IOException("Compression codec " + codec +<a name="line.818"></a>
-<span class="sourceLineNo">819</span>          " not supported, aborting RS construction");<a name="line.819"></a>
-<span class="sourceLineNo">820</span>      }<a name="line.820"></a>
-<span class="sourceLineNo">821</span>    }<a name="line.821"></a>
-<span class="sourceLineNo">822</span>  }<a name="line.822"></a>
-<span class="sourceLineNo">823</span><a name="line.823"></a>
-<span class="sourceLineNo">824</span>  public String getClusterId() {<a name="line.824"></a>
-<span class="sourceLineNo">825</span>    return this.clusterId;<a name="line.825"></a>
-<span class="sourceLineNo">826</span>  }<a name="line.826"></a>
-<span class="sourceLineNo">827</span><a name="line.827"></a>
-<span class="sourceLineNo">828</span>  /**<a name="line.828"></a>
-<span class="sourceLineNo">829</span>   * Setup our cluster connection if not already initialized.<a name="line.829"></a>
-<span class="sourceLineNo">830</span>   */<a name="line.830"></a>
-<span class="sourceLineNo">831</span>  protected synchronized void setupClusterConnection() throws IOException {<a name="line.831"></a>
-<span class="sourceLineNo">832</span>    if (clusterConnection == null) {<a name="line.832"></a>
-<span class="sourceLineNo">833</span>      clusterConnection = createClusterConnection();<a name="line.833"></a>
-<span class="sourceLineNo">834</span>      metaTableLocator = new MetaTableLocator();<a name="line.834"></a>
-<span class="sourceLineNo">835</span>    }<a name="line.835"></a>
-<span class="sourceLineNo">836</span>  }<a name="line.836"></a>
-<span class="sourceLineNo">837</span><a name="line.837"></a>
-<span class="sourceLineNo">838</span>  /**<a name="line.838"></a>
-<span class="sourceLineNo">839</span>   * All initialization needed before we go register with Master.&lt;br&gt;<a name="line.839"></a>
-<span class="sourceLineNo">840</span>   * Do bare minimum. Do bulk of initializations AFTER we've connected to the Master.&lt;br&gt;<a name="line.840"></a>
-<span class="sourceLineNo">841</span>   * In here we just put up the RpcServer, setup Connection, and ZooKeeper.<a name="line.841"></a>
-<span class="sourceLineNo">842</span>   */<a name="line.842"></a>
-<span class="sourceLineNo">843</span>  private void preRegistrationInitialization() {<a name="line.843"></a>
-<span class="sourceLineNo">844</span>    try {<a name="line.844"></a>
-<span class="sourceLineNo">845</span>      initializeZooKeeper();<a name="line.845"></a>
-<span class="sourceLineNo">846</span>      setupClusterConnection();<a name="line.846"></a>
-<span class="sourceLineNo">847</span>      // Setup RPC client for master communication<a name="line.847"></a>
-<span class="sourceLineNo">848</span>      this.rpcClient = RpcClientFactory.createClient(conf, clusterId, new InetSocketAddress(<a name="line.848"></a>
-<span class="sourceLineNo">849</span>          this.rpcServices.isa.getAddress(), 0), clusterConnection.getConnectionMetrics());<a name="line.849"></a>
-<span class="sourceLineNo">850</span>    } catch (Throwable t) {<a name="line.850"></a>
-<span class="sourceLineNo">851</span>      // Call stop if error or process will stick around for ever since server<a name="line.851"></a>
-<span class="sourceLineNo">852</span>      // puts up non-daemon threads.<a name="line.852"></a>
-<span class="sourceLineNo">853</span>      this.rpcServices.stop();<a name="line.853"></a>
-<span class="sourceLineNo">854</span>      abort("Initialization of RS failed.  Hence aborting RS.", t);<a name="line.854"></a>
-<span class="sourceLineNo">855</span>    }<a name="line.855"></a>
-<span class="sourceLineNo">856</span>  }<a name="line.856"></a>
-<span class="sourceLineNo">857</span><a name="line.857"></a>
-<span class="sourceLineNo">858</span>  /**<a name="line.858"></a>
-<span class="sourceLineNo">859</span>   * Bring up connection to zk ensemble and then wait until a master for this cluster and then after<a name="line.859"></a>
-<span class="sourceLineNo">860</span>   * that, wait until cluster 'up' flag has been set. This is the order in which master does things.<a name="line.860"></a>
-<span class="sourceLineNo">861</span>   * &lt;p&gt;<a name="line.861"></a>
-<span class="sourceLineNo">862</span>   * Finally open long-living server short-circuit connection.<a name="line.862"></a>
-<span class="sourceLineNo">863</span>   */<a name="line.863"></a>
-<span class="sourceLineNo">864</span>  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="RV_RETURN_VALUE_IGNORED_BAD_PRACTICE",<a name="line.864"></a>
-<span class="sourceLineNo">865</span>    justification="cluster Id znode read would give us correct response")<a name="line.865"></a>
-<span class="sourceLineNo">866</span>  private void initializeZooKeeper() throws IOException, InterruptedException {<a name="line.866"></a>
-<span class="sourceLineNo">867</span>    // Nothing to do in here if no Master in the mix.<a name="line.867"></a>
-<span class="sourceLineNo">868</span>    if (this.masterless) {<a name="line.868"></a>
-<span class="sourceLineNo">869</span>      return;<a name="line.869"></a>
-<span class="sourceLineNo">870</span>    }<a name="line.870"></a>
-<span class="sourceLineNo">871</span><a name="line.871"></a>
-<span class="sourceLineNo">872</span>    // Create the master address tracker, register with zk, and start it.  Then<a name="line.872"></a>
-<span class="sourceLineNo">873</span>    // block until a master is available.  No point in starting up if no master<a name="line.873"></a>
-<span class="sourceLineNo">874</span>    // running.<a name="line.874"></a>
-<span class="sourceLineNo">875</span>    blockAndCheckIfStopped(this.masterAddressTracker);<a name="line.875"></a>
+<span class="sourceLineNo">778</span><a name="line.778"></a>
+<span class="sourceLineNo">779</span>    coprocessorServiceHandlers.put(serviceName, instance);<a name="line.779"></a>
+<span class="sourceLineNo">780</span>    if (LOG.isDebugEnabled()) {<a name="line.780"></a>
+<span class="sourceLineNo">781</span>      LOG.debug("Registered regionserver coprocessor executorService: executorService=" + serviceName);<a name="line.781"></a>
+<span class="sourceLineNo">782</span>    }<a name="line.782"></a>
+<span class="sourceLineNo">783</span>    return true;<a name="line.783"></a>
+<span class="sourceLineNo">784</span>  }<a name="line.784"></a>
+<span class="sourceLineNo">785</span><a name="line.785"></a>
+<span class="sourceLineNo">786</span>  /**<a name="line.786"></a>
+<span class="sourceLineNo">787</span>   * Create a 'smarter' Connection, one that is capable of by-passing RPC if the request is to the<a name="line.787"></a>
+<span class="sourceLineNo">788</span>   * local server; i.e. a short-circuit Connection. Safe to use going to local or remote server.<a name="line.788"></a>
+<span class="sourceLineNo">789</span>   */<a name="line.789"></a>
+<span class="sourceLineNo">790</span>  private ClusterConnection createClusterConnection() throws IOException {<a name="line.790"></a>
+<span class="sourceLineNo">791</span>    Configuration conf = this.conf;<a name="line.791"></a>
+<span class="sourceLineNo">792</span>    if (conf.get(HConstants.CLIENT_ZOOKEEPER_QUORUM) != null) {<a name="line.792"></a>
+<span class="sourceLineNo">793</span>      // Use server ZK cluster for server-issued connections, so we clone<a name="line.793"></a>
+<span class="sourceLineNo">794</span>      // the conf and unset the client ZK related properties<a name="line.794"></a>
+<span class="sourceLineNo">795</span>      conf = new Configuration(this.conf);<a name="line.795"></a>
+<span class="sourceLineNo">796</span>      conf.unset(HConstants.CLIENT_ZOOKEEPER_QUORUM);<a name="line.796"></a>
+<span class="sourceLineNo">797</span>    }<a name="line.797"></a>
+<span class="sourceLineNo">798</span>    // Create a cluster connection that when appropriate, can short-circuit and go directly to the<a name="line.798"></a>
+<span class="sourceLineNo">799</span>    // local server if the request is to the local server bypassing RPC. Can be used for both local<a name="line.799"></a>
+<span class="sourceLineNo">800</span>    // and remote invocations.<a name="line.800"></a>
+<span class="sourceLineNo">801</span>    ClusterConnection conn = ConnectionUtils.createShortCircuitConnection(conf, null,<a name="line.801"></a>
+<span class="sourceLineNo">802</span>      userProvider.getCurrent(), serverName, rpcServices, rpcServices);<a name="line.802"></a>
+<span class="sourceLineNo">803</span>    // This is used to initialize the batch thread pool inside the connection implementation.<a name="line.803"></a>
+<span class="sourceLineNo">804</span>    // When deploy a fresh cluster, we may first use the cluster connection in InitMetaProcedure,<a name="line.804"></a>
+<span class="sourceLineNo">805</span>    // which will be executed inside the PEWorker, and then the batch thread pool will inherit the<a name="line.805"></a>
+<span class="sourceLineNo">806</span>    // thread group of PEWorker, which will be destroy when shutting down the ProcedureExecutor. It<a name="line.806"></a>
+<span class="sourceLineNo">807</span>    // will cause lots of procedure related UTs to fail, so here let's initialize it first, no harm.<a name="line.807"></a>
+<span class="sourceLineNo">808</span>    conn.getTable(TableName.META_TABLE_NAME).close();<a name="line.808"></a>
+<span class="sourceLineNo">809</span>    return conn;<a name="line.809"></a>
+<span class="sourceLineNo">810</span>  }<a name="line.810"></a>
+<span class="sourceLineNo">811</span><a name="line.811"></a>
+<span class="sourceLineNo">812</span>  /**<a name="line.812"></a>
+<span class="sourceLineNo">813</span>   * Run test on configured codecs to make sure supporting libs are in place.<a name="line.813"></a>
+<span class="sourceLineNo">814</span>   * @param c<a name="line.814"></a>
+<span class="sourceLineNo">815</span>   * @throws IOException<a name="line.815"></a>
+<span class="sourceLineNo">816</span>   */<a name="line.816"></a>
+<span class="sourceLineNo">817</span>  private static void checkCodecs(final Configuration c) throws IOException {<a name="line.817"></a>
+<span class="sourceLineNo">818</span>    // check to see if the codec list is available:<a name="line.818"></a>
+<span class="sourceLineNo">819</span>    String [] codecs = c.getStrings("hbase.regionserver.codecs", (String[])null);<a name="line.819"></a>
+<span class="sourceLineNo">820</span>    if (codecs == null) return;<a name="line.820"></a>
+<span class="sourceLineNo">821</span>    for (String codec : codecs) {<a name="line.821"></a>
+<span class="sourceLineNo">822</span>      if (!CompressionTest.testCompression(codec)) {<a name="line.822"></a>
+<span class="sourceLineNo">823</span>        throw new IOException("Compression codec " + codec +<a name="line.823"></a>
+<span class="sourceLineNo">824</span>          " not supported, aborting RS construction");<a name="line.824"></a>
+<span class="sourceLineNo">825</span>      }<a name="line.825"></a>
+<span class="sourceLineNo">826</span>    }<a name="line.826"></a>
+<span class="sourceLineNo">827</span>  }<a name="line.827"></a>
+<span class="sourceLineNo">828</span><a name="line.828"></a>
+<span class="sourceLineNo">829</span>  public String getClusterId() {<a name="line.829"></a>
+<span class="sourceLineNo">830</span>    return this.clusterId;<a name="line.830"></a>
+<span class="sourceLineNo">831</span>  }<a name="line.831"></a>
+<span class="sourceLineNo">832</span><a name="line.832"></a>
+<span class="sourceLineNo">833</span>  /**<a name="line.833"></a>
+<span class="sourceLineNo">834</span>   * Setup our cluster connection if not already initialized.<a name="line.834"></a>
+<span class="sourceLineNo">835</span>   */<a name="line.835"></a>
+<span class="sourceLineNo">836</span>  protected synchronized void setupClusterConnection() throws IOException {<a name="line.836"></a>
+<span class="sourceLineNo">837</span>    if (clusterConnection == null) {<a name="line.837"></a>
+<span class="sourceLineNo">838</span>      clusterConnection = createClusterConnection();<a name="line.838"></a>
+<span class="sourceLineNo">839</span>      metaTableLocator = new MetaTableLocator();<a name="line.839"></a>
+<span class="sourceLineNo">840</span>    }<a name="line.840"></a>
+<span class="sourceLineNo">841</span>  }<a name="line.841"></a>
+<span class="sourceLineNo">842</span><a name="line.842"></a>
+<span class="sourceLineNo">843</span>  /**<a name="line.843"></a>
+<span class="sourceLineNo">844</span>   * All initialization needed before we go register with Master.&lt;br&gt;<a name="line.844"></a>
+<span class="sourceLineNo">845</span>   * Do bare minimum. Do bulk of initializations AFTER we've connected to the Master.&lt;br&gt;<a name="line.845"></a>
+<span class="sourceLineNo">846</span>   * In here we just put up the RpcServer, setup Connection, and ZooKeeper.<a name="line.846"></a>
+<span class="sourceLineNo">847</span>   */<a name="line.847"></a>
+<span class="sourceLineNo">848</span>  private void preRegistrationInitialization() {<a name="line.848"></a>
+<span class="sourceLineNo">849</span>    try {<a name="line.849"></a>
+<span class="sourceLineNo">850</span>      initializeZooKeeper();<a name="line.850"></a>
+<span class="sourceLineNo">851</span>      setupClusterConnection();<a name="line.851"></a>
+<span class="sourceLineNo">852</span>      // Setup RPC client for master communication<a name="line.852"></a>
+<span class="sourceLineNo">853</span>      this.rpcClient = RpcClientFactory.createClient(conf, clusterId, new InetSocketAddress(<a name="line.853"></a>
+<span class="sourceLineNo">854</span>          this.rpcServices.isa.getAddress(), 0), clusterConnection.getConnectionMetrics());<a name="line.854"></a>
+<span class="sourceLineNo">855</span>    } catch (Throwable t) {<a name="line.855"></a>
+<span class="sourceLineNo">856</span>      // Call stop if error or process will stick around for ever since server<a name="line.856"></a>
+<span class="sourceLineNo">857</span>      // puts up non-daemon threads.<a name="line.857"></a>
+<span class="sourceLineNo">858</span>      this.rpcServices.stop();<a name="line.858"></a>
+<span class="sourceLineNo">859</span>      abort("Initialization of RS failed.  Hence aborting RS.", t);<a name="line.859"></a>
+<span class="sourceLineNo">860</span>    }<a name="line.860"></a>
+<span class="sourceLineNo">861</span>  }<a name="line.861"></a>
+<span class="sourceLineNo">862</span><a name="line.862"></a>
+<span class="sourceLineNo">863</span>  /**<a name="line.863"></a>
+<span class="sourceLineNo">864</span>   * Bring up connection to zk ensemble and then wait until a master for this cluster and then after<a name="line.864"></a>
+<span class="sourceLineNo">865</span>   * that, wait until cluster 'up' flag has been set. This is the order in which master does things.<a name="line.865"></a>
+<span class="sourceLineNo">866</span>   * &lt;p&gt;<a name="line.866"></a>
+<span class="sourceLineNo">867</span>   * Finally open long-living server short-circuit connection.<a name="line.867"></a>
+<span class="sourceLineNo">868</span>   */<a name="line.868"></a>
+<span class="sourceLineNo">869</span>  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="RV_RETURN_VALUE_IGNORED_BAD_PRACTICE",<a name="line.869"></a>
+<span class="sourceLineNo">870</span>    justification="cluster Id znode read would give us correct response")<a name="line.870"></a>
+<span class="sourceLineNo">871</span>  private void initializeZooKeeper() throws IOException, InterruptedException {<a name="line.871"></a>
+<span class="sourceLineNo">872</span>    // Nothing to do in here if no Master in the mix.<a name="line.872"></a>
+<span class="sourceLineNo">873</span>    if (this.masterless) {<a name="line.873"></a>
+<span class="sourceLineNo">874</span>      return;<a name="line.874"></a>
+<span class="sourceLineNo">875</span>    }<a name="line.875"></a>
 <span class="sourceLineNo">876</span><a name="line.876"></a>
-<span class="sourceLineNo">877</span>    // Wait on cluster being up.  Master will set this flag up in zookeeper<a name="line.877"></a>
-<span class="sourceLineNo">878</span>    // when ready.<a name="line.878"></a>
-<span class="sourceLineNo">879</span>    blockAndCheckIfStopped(this.clusterStatusTracker);<a name="line.879"></a>
-<span class="sourceLineNo">880</span><a name="line.880"></a>
-<span class="sourceLineNo">881</span>    // If we are HMaster then the cluster id should have already been set.<a name="line.881"></a>
-<span class="sourceLineNo">882</span>    if (clusterId == null) {<a name="line.882"></a>
-<span class="sourceLineNo">883</span>      // Retrieve clusterId<a name="line.883"></a>
-<span class="sourceLineNo">884</span>      // Since cluster status is now up<a name="line.884"></a>
-<span class="sourceLineNo">885</span>      // ID should have already been set by HMaster<a name="line.885"></a>
-<span class="sourceLineNo">886</span>      try {<a name="line.886"></a>
-<span class="sourceLineNo">887</span>        clusterId = ZKClusterId.readClusterIdZNode(this.zooKeeper);<a name="line.887"></a>
-<span class="sourceLineNo">888</span>        if (clusterId == null) {<a name="line.888"></a>
-<span class="sourceLineNo">889</span>          this.abort("Cluster ID has not been set");<a name="line.889"></a>
-<span class="sourceLineNo">890</span>        }<a name="line.890"></a>
-<span class="sourceLineNo">891</span>        LOG.info("ClusterId : " + clusterId);<a name="line.891"></a>
-<span class="sourceLineNo">892</span>      } catch (KeeperException e) {<a name="line.892"></a>
-<span class="sourceLineNo">893</span>        this.abort("Failed to retrieve Cluster ID", e);<a name="line.893"></a>
-<span class="sourceLineNo">894</span>      }<a name="line.894"></a>
-<span class="sourceLineNo">895</span>    }<a name="line.895"></a>
-<span class="sourceLineNo">896</span><a name="line.896"></a>
-<span class="sourceLineNo">897</span>    waitForMasterActive();<a name="line.897"></a>
-<span class="sourceLineNo">898</span>    if (isStopped() || isAborted()) {<a name="line.898"></a>
-<span class="sourceLineNo">899</span>      return; // No need for further initialization<a name="line.899"></a>
+<span class="sourceLineNo">877</span>    // Create the master address tracker, register with zk, and start it.  Then<a name="line.877"></a>
+<span class="sourceLineNo">878</span>    // block until a master is available.  No point in starting up if no master<a name="line.878"></a>
+<span class="sourceLineNo">879</span>    // running.<a name="line.879"></a>
+<span class="sourceLineNo">880</span>    blockAndCheckIfStopped(this.masterAddressTracker);<a name="line.880"></a>
+<span class="sourceLineNo">881</span><a name="line.881"></a>
+<span class="sourceLineNo">882</span>    // Wait on cluster being up.  Master will set this flag up in zookeeper<a name="line.882"></a>
+<span class="sourceLineNo">883</span>    // when ready.<a name="line.883"></a>
+<span class="sourceLineNo">884</span>    blockAndCheckIfStopped(this.clusterStatusTracker);<a name="line.884"></a>
+<span class="sourceLineNo">885</span><a name="line.885"></a>
+<span class="sourceLineNo">886</span>    // If we are HMaster then the cluster id should have already been set.<a name="line.886"></a>
+<span class="sourceLineNo">887</span>    if (clusterId == null) {<a name="line.887"></a>
+<span class="sourceLineNo">888</span>      // Retrieve clusterId<a name="line.888"></a>
+<span class="sourceLineNo">889</span>      // Since cluster status is now up<a name="line.889"></a>
+<span class="sourceLineNo">890</span>      // ID should have already been set by HMaster<a name="line.890"></a>
+<span class="sourceLineNo">891</span>      try {<a name="line.891"></a>
+<span class="sourceLineNo">892</span>        clusterId = ZKClusterId.readClusterIdZNode(this.zooKeeper);<a name="line.892"></a>
+<span class="sourceLineNo">893</span>        if (clusterId == null) {<a name="line.893"></a>
+<span class="sourceLineNo">894</span>          this.abort("Cluster ID has not been set");<a name="line.894"></a>
+<span class="sourceLineNo">895</span>        }<a name="line.895"></a>
+<span class="sourceLineNo">896</span>        LOG.info("ClusterId : " + clusterId);<a name="line.896"></a>
+<span class="sourceLineNo">897</span>      } catch (KeeperException e) {<a name="line.897"></a>
+<span class="sourceLineNo">898</span>        this.abort("Failed to retrieve Cluster ID", e);<a name="line.898"></a>
+<span class="sourceLineNo">899</span>      }<a name="line.899"></a>
 <span class="sourceLineNo">900</span>    }<a name="line.900"></a>
 <span class="sourceLineNo">901</span><a name="line.901"></a>
-<span class="sourceLineNo">902</span>    // watch for snapshots and other procedures<a name="line.902"></a>
-<span class="sourceLineNo">903</span>    try {<a name="line.903"></a>
-<span class="sourceLineNo">904</span>      rspmHost = new RegionServerProcedureManagerHost();<a name="line.904"></a>
-<span class="sourceLineNo">905</span>      rspmHost.loadProcedures(conf);<a name="line.905"></a>
-<span class="sourceLineNo">906</span>      rspmHost.initialize(this);<a name="line.906"></a>
-<span class="sourceLineNo">907</span>    } catch (KeeperException e) {<a name="line.907"></a>
-<span class="sourceLineNo">908</span>      this.abort("Failed to reach coordination cluster when creating procedure handler.", e);<a name="line.908"></a>
-<span class="sourceLineNo">909</span>    }<a name="line.909"></a>
-<span class="sourceLineNo">910</span>  }<a name="line.910"></a>
-<span class="sourceLineNo">911</span><a name="line.911"></a>
-<span class="sourceLineNo">912</span>  /**<a name="line.912"></a>
-<span class="sourceLineNo">913</span>   * Utilty method to wait indefinitely on a znode availability while checking<a name="line.913"></a>
-<span class="sourceLineNo">914</span>   * if the region server is shut down<a name="line.914"></a>
-<span class="sourceLineNo">915</span>   * @param tracker znode tracker to use<a name="line.915"></a>
-<span class="sourceLineNo">916</span>   * @throws IOException any IO exception, plus if the RS is stopped<a name="line.916"></a>
-<span class="sourceLineNo">917</span>   * @throws InterruptedException<a name="line.917"></a>
-<span class="sourceLineNo">918</span>   */<a name="line.918"></a>
-<span class="sourceLineNo">919</span>  private void blockAndCheckIfStopped(ZKNodeTracker tracker)<a name="line.919"></a>
-<span class="sourceLineNo">920</span>      throws IOException, InterruptedException {<a name="line.920"></a>
-<span class="sourceLineNo">921</span>    while (tracker.blockUntilAvailable(this.msgInterval, false) == null) {<a name="line.921"></a>
-<span class="sourceLineNo">922</span>      if (this.stopped) {<a name="line.922"></a>
-<span class="sourceLineNo">923</span>        throw new IOException("Received the shutdown message while waiting.");<a name="line.923"></a>
-<span class="sourceLineNo">924</span>      }<a name="line.924"></a>
-<span class="sourceLineNo">925</span>    }<a name="line.925"></a>
-<span class="sourceLineNo">926</span>  }<a name="line.926"></a>
-<span class="sourceLineNo">927</span><a name="line.927"></a>
-<span class="sourceLineNo">928</span>  /**<a name="line.928"></a>
-<span class="sourceLineNo">929</span>   * @return True if the cluster is up.<a name="line.929"></a>
-<span class="sourceLineNo">930</span>   */<a name="line.930"></a>
-<span class="sourceLineNo">931</span>  @Override<a name="line.931"></a>
-<span class="sourceLineNo">932</span>  public boolean isClusterUp() {<a name="line.932"></a>
-<span class="sourceLineNo">933</span>    return this.masterless ||<a name="line.933"></a>
-<span class="sourceLineNo">934</span>        (this.clusterStatusTracker != null &amp;&amp; this.clusterStatusTracker.isClusterUp());<a name="line.934"></a>
-<span class="sourceLineNo">935</span>  }<a name="line.935"></a>
-<span class="sourceLineNo">936</span><a name="line.936"></a>
-<span class="sourceLineNo">937</span>  /**<a name="line.937"></a>
-<span class="sourceLineNo">938</span>   * The HRegionServer sticks in this loop until closed.<a name="line.938"></a>
-<span class="sourceLineNo">939</span>   */<a name="line.939"></a>
-<span class="sourceLineNo">940</span>  @Override<a name="line.940"></a>
-<span class="sourceLineNo">941</span>  public void run() {<a name="line.941"></a>
-<span class="sourceLineNo">942</span>    try {<a name="line.942"></a>
-<span class="sourceLineNo">943</span>      // Do pre-registration initializations; zookeeper, lease threads, etc.<a name="line.943"></a>
-<span class="sourceLineNo">944</span>      preRegistrationInitialization();<a name="line.944"></a>
-<span class="sourceLineNo">945</span>    } catch (Throwable e) {<a name="line.945"></a>
-<span class="sourceLineNo">946</span>      abort("Fatal exception during initialization", e);<a name="line.946"></a>
-<span class="sourceLineNo">947</span>    }<a name="line.947"></a>
-<span class="sourceLineNo">948</span><a name="line.948"></a>
-<span class="sourceLineNo">949</span>    try {<a name="line.949"></a>
-<span class="sourceLineNo">950</span>      if (!isStopped() &amp;&amp; !isAborted()) {<a name="line.950"></a>
-<span class="sourceLineNo">951</span>        ShutdownHook.install(conf, fs, this, Thread.currentThread());<a name="line.951"></a>
-<span class="sourceLineNo">952</span>        // Initialize the RegionServerCoprocessorHost now that our ephemeral<a name="line.952"></a>
-<span class="sourceLineNo">953</span>        // node was created, in case any coprocessors want to use ZooKeeper<a name="line.953"></a>
-<span class="sourceLineNo">954</span>        this.rsHost = new RegionServerCoprocessorHost(this, this.conf);<a name="line.954"></a>
-<span class="sourceLineNo">955</span>      }<a name="line.955"></a>
-<span class="sourceLineNo">956</span><a name="line.956"></a>
-<span class="sourceLineNo">957</span>      // Try and register with the Master; tell it we are here.  Break if server is stopped or the<a name="line.957"></a>
-<span class="sourceLineNo">958</span>      // clusterup flag is down or hdfs went wacky. Once registered successfully, go ahead and start<a name="line.958"></a>
-<span class="sourceLineNo">959</span>      // up all Services. Use RetryCounter to get backoff in case Master is struggling to come up.<a name="line.959"></a>
-<span class="sourceLineNo">960</span>      LOG.debug("About to register with Master.");<a name="line.960"></a>
-<span class="sourceLineNo">961</span>      RetryCounterFactory rcf = new RetryCounterFactory(Integer.MAX_VALUE,<a name="line.961"></a>
-<span class="sourceLineNo">962</span>          this.sleeper.getPeriod(), 1000 * 60 * 5);<a name="line.962"></a>
-<span class="sourceLineNo">963</span>      RetryCounter rc = rcf.create();<a name="line.963"></a>
-<span class="sourceLineNo">964</span>      while (keepLooping()) {<a name="line.964"></a>
-<span class="sourceLineNo">965</span>        RegionServerStartupResponse w = reportForDuty();<a name="line.965"></a>
-<span class="sourceLineNo">966</span>        if (w == null) {<a name="line.966"></a>
-<span class="sourceLineNo">967</span>          long sleepTime = rc.getBackoffTimeAndIncrementAttempts();<a name="line.967"></a>
-<span class="sourceLineNo">968</span>          LOG.warn("reportForDuty failed; sleeping {} ms and then retrying.", sleepTime);<a name="line.968"></a>
-<span class="sourceLineNo">969</span>          this.sleeper.sleep(sleepTime);<a name="line.969"></a>
-<span class="sourceLineNo">970</span>        } else {<a name="line.970"></a>
-<span class="sourceLineNo">971</span>          handleReportForDutyResponse(w);<a name="line.971"></a>
-<span class="sourceLineNo">972</span>          break;<a name="line.972"></a>
-<span class="sourceLineNo">973</span>        }<a name="line.973"></a>
-<span class="sourceLineNo">974</span>      }<a name="line.974"></a>
-<span class="sourceLineNo">975</span><a name="line.975"></a>
-<span class="sourceLineNo">976</span>      if (!isStopped() &amp;&amp; isHealthy()) {<a name="line.976"></a>
-<span class="sourceLineNo">977</span>        // start the snapshot handler and other procedure handlers,<a name="line.977"></a>
-<span class="sourceLineNo">978</span>        // since the server is ready to run<a name="line.978"></a>
-<span class="sourceLineNo">979</span>        if (this.rspmHost != null) {<a name="line.979"></a>
-<span class="sourceLineNo">980</span>          this.rspmHost.start();<a name="line.980"></a>
-<span class="sourceLineNo">981</span>        }<a name="line.981"></a>
-<span class="sourceLineNo">982</span>        // Start the Quota Manager<a name="line.982"></a>
-<span class="sourceLineNo">983</span>        if (this.rsQuotaManager != null) {<a name="line.983"></a>
-<span class="sourceLineNo">984</span>          rsQuotaManager.start(getRpcServer().getScheduler());<a name="line.984"></a>
-<span class="sourceLineNo">985</span>        }<a name="line.985"></a>
-<span class="sourceLineNo">986</span>        if (this.rsSpaceQuotaManager != null) {<a name="line.986"></a>
-<span class="sourceLineNo">987</span>          this.rsSpaceQuotaManager.start();<a name="line.987"></a>
-<span class="sourceLineNo">988</span>        }<a name="line.988"></a>
-<span class="sourceLineNo">989</span>      }<a name="line.989"></a>
-<span class="sourceLineNo">990</span><a name="line.990"></a>
-<span class="sourceLineNo">991</span>      // We registered with the Master.  Go into run mode.<a name="line.991"></a>
-<span class="sourceLineNo">992</span>      long lastMsg = System.currentTimeMillis();<a name="line.992"></a>
-<span class="sourceLineNo">993</span>      long oldRequestCount = -1;<a name="line.993"></a>
-<span class="sourceLineNo">994</span>      // The main run loop.<a name="line.994"></a>
-<span class="sourceLineNo">995</span>      while (!isStopped() &amp;&amp; isHealthy()) {<a name="line.995"></a>
-<span class="sourceLineNo">996</span>        if (!isClusterUp()) {<a name="line.996"></a>
-<span class="sourceLineNo">997</span>          if (isOnlineRegionsEmpty()) {<a name="line.997"></a>
-<span class="sourceLineNo">998</span>            stop("Exiting; cluster shutdown set and not carrying any regions");<a name="line.998"></a>
-<span class="sourceLineNo">999</span>          } else if (!this.stopping) {<a name="line.999"></a>
-<span class="sourceLineNo">1000</span>            this.stopping = true;<a name="line.1000"></a>
-<span class="sourceLineNo">1001</span>            LOG.info("Closing user regions");<a name="line.1001"></a>
-<span class="sourceLineNo">1002</span>            closeUserRegions(this.abortRequested);<a name="line.1002"></a>
-<span class="sourceLineNo">1003</span>          } else if (this.stopping) {<a name="line.1003"></a>
-<span class="sourceLineNo">1004</span>            boolean allUserRegionsOffline = areAllUserRegionsOffline();<a name="line.1004"></a>
-<span class="sourceLineNo">1005</span>            if (allUserRegionsOffline) {<a name="line.1005"></a>
-<span class="sourceLineNo">1006</span>              // Set stopped if no more write requests tp meta tables<a name="line.1006"></a>
-<span class="sourceLineNo">1007</span>              // since last time we went around the loop.  Any open<a name="line.1007"></a>
-<span class="sourceLineNo">1008</span>              // meta regions will be closed on our way out.<a name="line.1008"></a>
-<span class="sourceLineNo">1009</span>              if (oldRequestCount == getWriteRequestCount()) {<a name="line.1009"></a>
-<span class="sourceLineNo">1010</span>                stop("Stopped; only catalog regions remaining online");<a name="line.1010"></a>
-<span class="sourceLineNo">1011</span>                break;<a name="line.1011"></a>
-<span class="sourceLineNo">1012</span>              }<a name="line.1012"></a>
-<span class="sourceLineNo">1013</span>              oldRequestCount = getWriteRequestCount();<a name="line.1013"></a>
-<span class="sourceLineNo">1014</span>            } else {<a name="line.1014"></a>
-<span class="sourceLineNo">1015</span>              // Make sure all regions have been closed -- some regions may<a name="line.1015"></a>
-<span class="sourceLineNo">1016</span>              // have not got it because we were splitting at the time of<a name="line.1016"></a>
-<span class="sourceLineNo">1017</span>              // the call to closeUserRegions.<a name="line.1017"></a>
-<span class="sourceLineNo">1018</span>              closeUserRegions(this.abortRequested);<a name="line.1018"></a>
-<span class="sourceLineNo">1019</span>            }<a name="line.1019"></a>
-<span class="sourceLineNo">1020</span>            LOG.debug("Waiting on " + getOnlineRegionsAsPrintableString());<a name="line.1020"></a>
-<span class="sourceLineNo">1021</span>          }<a name="line.1021"></a>
-<span class="sourceLineNo">1022</span>        }<a name="line.1022"></a>
-<span class="sourceLineNo">1023</span>        long now = System.currentTimeMillis();<a name="line.1023"></a>
-<span class="sourceLineNo">1024</span>        if ((now - lastMsg) &gt;= msgInterval) {<a name="line.1024"></a>
-<span class="sourceLineNo">1025</span>          tryRegionServerReport(lastMsg, now);<a name="line.1025"></a>
-<span class="sourceLineNo">1026</span>          lastMsg = System.currentTimeMillis();<a name="line.1026"></a>
+<span class="sourceLineNo">902</span>    waitForMasterActive();<a name="line.902"></a>
+<span class="sourceLineNo">903</span>    if (isStopped() || isAborted()) {<a name="line.903"></a>
+<span class="sourceLineNo">904</span>      return; // No need for further initialization<a name="line.904"></a>
+<span class="sourceLineNo">905</span>    }<a name="line.905"></a>
+<span class="sourceLineNo">906</span><a name="line.906"></a>
+<span class="sourceLineNo">907</span>    // watch for snapshots and other procedures<a name="line.907"></a>
+<span class="sourceLineNo">908</span>    try {<a name="line.908"></a>
+<span class="sourceLineNo">909</span>      rspmHost = new RegionServerProcedureManagerHost();<a name="line.909"></a>
+<span class="sourceLineNo">910</span>      rspmHost.loadProcedures(conf);<a name="line.910"></a>
+<span class="sourceLineNo">911</span>      rspmHost.initialize(this);<a name="line.911"></a>
+<span class="sourceLineNo">912</span>    } catch (KeeperException e) {<a name="line.912"></a>
+<span class="sourceLineNo">913</span>      this.abort("Failed to reach coordination cluster when creating procedure handler.", e);<a name="line.913"></a>
+<span class="sourceLineNo">914</span>    }<a name="line.914"></a>
+<span class="sourceLineNo">915</span>  }<a name="line.915"></a>
+<span class="sourceLineNo">916</span><a name="line.916"></a>
+<span class="sourceLineNo">917</span>  /**<a name="line.917"></a>
+<span class="sourceLineNo">918</span>   * Utilty method to wait indefinitely on a znode availability while checking<a name="line.918"></a>
+<span class="sourceLineNo">919</span>   * if the region server is shut down<a name="line.919"></a>
+<span class="sourceLineNo">920</span>   * @param tracker znode tracker to use<a name="line.920"></a>
+<span class="sourceLineNo">921</span>   * @throws IOException any IO exception, plus if the RS is stopped<a name="line.921"></a>
+<span class="sourceLineNo">922</span>   * @throws InterruptedException<a name="line.922"></a>
+<span class="sourceLineNo">923</span>   */<a name="line.923"></a>
+<span class="sourceLineNo">924</span>  private void blockAndCheckIfStopped(ZKNodeTracker tracker)<a name="line.924"></a>
+<span class="sourceLineNo">925</span>      throws IOException, InterruptedException {<a name="line.925"></a>
+<span class="sourceLineNo">926</span>    while (tracker.blockUntilAvailable(this.msgInterval, false) == null) {<a name="line.926"></a>
+<span class="sourceLineNo">927</span>      if (this.stopped) {<a name="line.927"></a>
+<span class="sourceLineNo">928</span>        throw new IOException("Received the shutdown message while waiting.");<a name="line.928"></a>
+<span class="sourceLineNo">929</span>      }<a name="line.929"></a>
+<span class="sourceLineNo">930</span>    }<a name="line.930"></a>
+<span class="sourceLineNo">931</span>  }<a name="line.931"></a>
+<span class="sourceLineNo">932</span><a name="line.932"></a>
+<span class="sourceLineNo">933</span>  /**<a name="line.933"></a>
+<span class="sourceLineNo">934</span>   * @return True if the cluster is up.<a name="line.934"></a>
+<span class="sourceLineNo">935</span>   */<a name="line.935"></a>
+<span class="sourceLineNo">936</span>  @Override<a name="line.936"></a>
+<span class="sourceLineNo">937</span>  public boolean isClusterUp() {<a name="line.937"></a>
+<span class="sourceLineNo">938</span>    return this.masterless ||<a name="line.938"></a>
+<span class="sourceLineNo">939</span>        (this.clusterStatusTracker != null &amp;&amp; this.clusterStatusTracker.isClusterUp());<a name="line.939"></a>
+<span class="sourceLineNo">940</span>  }<a name="line.940"></a>
+<span class="sourceLineNo">941</span><a name="line.941"></a>
+<span class="sourceLineNo">942</span>  /**<a name="line.942"></a>
+<span class="sourceLineNo">943</span>   * The HRegionServer sticks in this loop until closed.<a name="line.943"></a>
+<span class="sourceLineNo">944</span>   */<a name="line.944"></a>
+<span class="sourceLineNo">945</span>  @Override<a name="line.945"></a>
+<span class="sourceLineNo">946</span>  public void run() {<a name="line.946"></a>
+<span class="sourceLineNo">947</span>    try {<a name="line.947"></a>
+<span class="sourceLineNo">948</span>      // Do pre-registration initializations; zookeeper, lease threads, etc.<a name="line.948"></a>
+<span class="sourceLineNo">949</span>      preRegistrationInitialization();<a name="line.949"></a>
+<span class="sourceLineNo">950</span>    } catch (Throwable e) {<a name="line.950"></a>
+<span class="sourceLineNo">951</span>      abort("Fatal exception during initialization", e);<a name="line.951"></a>
+<span class="sourceLineNo">952</span>    }<a name="line.952"></a>
+<span class="sourceLineNo">953</span><a name="line.953"></a>
+<span class="sourceLineNo">954</span>    try {<a name="line.954"></a>
+<span class="sourceLineNo">955</span>      if (!isStopped() &amp;&amp; !isAborted()) {<a name="line.955"></a>
+<span class="sourceLineNo">956</span>        ShutdownHook.install(conf, fs, this, Thread.currentThread());<a name="line.956"></a>
+<span class="sourceLineNo">957</span>        // Initialize the RegionServerCoprocessorHost now that our ephemeral<a name="line.957"></a>
+<span class="sourceLineNo">958</span>        // node was created, in case any coprocessors want to use ZooKeeper<a name="line.958"></a>
+<span class="sourceLineNo">959</span>        this.rsHost = new RegionServerCoprocessorHost(this, this.conf);<a name="line.959"></a>
+<span class="sourceLineNo">960</span>      }<a name="line.960"></a>
+<span class="sourceLineNo">961</span><a name="line.961"></a>
+<span class="sourceLineNo">962</span>      // Try and register with the Master; tell it we are here.  Break if server is stopped or the<a name="line.962"></a>
+<span class="sourceLineNo">963</span>      // clusterup flag is down or hdfs went wacky. Once registered successfully, go ahead and start<a name="line.963"></a>
+<span class="sourceLineNo">964</span>      // up all Services. Use RetryCounter to get backoff in case Master is struggling to come up.<a name="line.964"></a>
+<span class="sourceLineNo">965</span>      LOG.debug("About to register with Master.");<a name="line.965"></a>
+<span class="sourceLineNo">966</span>      RetryCounterFactory rcf = new RetryCounterFactory(Integer.MAX_VALUE,<a name="line.966"></a>
+<span class="sourceLineNo">967</span>          this.sleeper.getPeriod(), 1000 * 60 * 5);<a name="line.967"></a>
+<span class="sourceLineNo">968</span>      RetryCounter rc = rcf.create();<a name="line.968"></a>
+<span class="sourceLineNo">969</span>      while (keepLooping()) {<a name="line.969"></a>
+<span class="sourceLineNo">970</span>        RegionServerStartupResponse w = reportForDuty();<a name="line.970"></a>
+<span class="sourceLineNo">971</span>        if (w == null) {<a name="line.971"></a>
+<span class="sourceLineNo">972</span>          long sleepTime = rc.getBackoffTimeAndIncrementAttempts();<a name="line.972"></a>
+<span class="sourceLineNo">973</span>          LOG.warn("reportForDuty failed; sleeping {} ms and then retrying.", sleepTime);<a name="line.973"></a>
+<span class="sourceLineNo">974</span>          this.sleeper.sleep(sleepTime);<a name="line.974"></a>
+<span class="sourceLineNo">975</span>        } else {<a name="line.975"></a>
+<span class="sourceLineNo">976</span>          handleReportForDutyResponse(w);<a name="line.976"></a>
+<span class="sourceLineNo">977</span>          break;<a name="line.977"></a>
+<span class="sourceLineNo">978</span>        }<a name="line.978"></a>
+<span class="sourceLineNo">979</span>      }<a name="line.979"></a>
+<span class="sourceLineNo">980</span><a name="line.980"></a>
+<span class="sourceLineNo">981</span>      if (!isStopped() &amp;&amp; isHealthy()) {<a name="line.981"></a>
+<span class="sourceLineNo">982</span>        // start the snapshot handler and other procedure handlers,<a name="line.982"></a>
+<span class="sourceLineNo">983</span>        // since the server is ready to run<a name="line.983"></a>
+<span class="sourceLineNo">984</span>        if (this.rspmHost != null) {<a name="line.984"></a>
+<span class="sourceLineNo">985</span>          this.rspmHost.start();<a name="line.985"></a>
+<span class="sourceLineNo">986</span>        }<a name="line.986"></a>
+<span class="sourceLineNo">987</span>        // Start the Quota Manager<a name="line.987"></a>
+<span class="sourceLineNo">988</span>        if (this.rsQuotaManager != null) {<a name="line.988"></a>
+<span class="sourceLineNo">989</span>          rsQuotaManager.start(getRpcServer().getScheduler());<a name="line.989"></a>
+<span class="sourceLineNo">990</span>        }<a name="line.990"></a>
+<span class="sourceLineNo">991</span>        if (this.rsSpaceQuotaManager != null) {<a name="line.991"></a>
+<span class="sourceLineNo">992</span>          this.rsSpaceQuotaManager.start();<a name="line.992"></a>
+<span class="sourceLineNo">993</span>        }<a name="line.993"></a>
+<span class="sourceLineNo">994</span>      }<a name="line.994"></a>
+<span class="sourceLineNo">995</span><a name="line.995"></a>
+<span class="sourceLineNo">996</span>      // We registered with the Master.  Go into run mode.<a name="line.996"></a>
+<span class="sourceLineNo">997</span>      long lastMsg = System.currentTimeMillis();<a name="line.997"></a>
+<span class="sourceLineNo">998</span>      long oldRequestCount = -1;<a name="line.998"></a>
+<span class="sourceLineNo">999</span>      // The main run loop.<a name="line.999"></a>
+<span class="sourceLineNo">1000</span>      while (!isStopped() &amp;&amp; isHealthy()) {<a name="line.1000"></a>
+<span class="sourceLineNo">1001</span>        if (!isClusterUp()) {<a name="line.1001"></a>
+<span class="sourceLineNo">1002</span>          if (isOnlineRegionsEmpty()) {<a name="line.1002"></a>
+<span class="sourceLineNo">1003</span>            stop("Exiting; cluster shutdown set and not carrying any regions");<a name="line.1003"></a>
+<span class="sourceLineNo">1004</span>          } else if (!this.stopping) {<a name="line.1004"></a>
+<span class="sourceLineNo">1005</span>            this.stopping = true;<a name="line.1005"></a>
+<span class="sourceLineNo">1006</span>            LOG.info("Closing user regions");<a name="line.1006"></a>
+<span class="sourceLineNo">1007</span>            closeUserRegions(this.abortRequested);<a name="line.1007"></a>
+<span class="sourceLineNo">1008</span>          } else if (this.stopping) {<a name="line.1008"></a>
+<span class="sourceLineNo">1009</span>            boolean allUserRegionsOffline = areAllUserRegionsOffline();<a name="line.1009"></a>
+<span class="sourceLineNo">1010</span>            if (allUserRegionsOffline) {<a name="line.1010"></a>
+<span class="sourceLineNo">1011</span>              // Set stopped if no more write requests tp meta tables<a name="line.1011"></a>
+<span class="sourceLineNo">1012</span>              // since last time we went around the loop.  Any open<a name="line.1012"></a>
+<span class="sourceLineNo">1013</span>              // meta regions will be closed on our way out.<a name="line.1013"></a>
+<span class="sourceLineNo">1014</span>              if (oldRequestCount == getWriteRequestCount()) {<a name="line.1014"></a>
+<span class="sourceLineNo">1015</span>                stop("Stopped; only catalog regions remaining online");<a name="line.1015"></a>
+<span class="sourceLineNo">1016</span>                break;<a name="line.1016"></a>
+<span class="sourceLineNo">1017</span>              }<a name="line.1017"></a>
+<span class="sourceLineNo">1018</span>              oldRequestCount = getWriteRequestCount();<a name="line.1018"></a>
+<span class="sourceLineNo">1019</span>            } else {<a name="line.1019"></a>
+<span class="sourceLineNo">1020</span>              // Make sure all regions have been closed -- some regions may<a name="line.1020"></a>
+<span class="sourceLineNo">1021</span>              // have not got it because we were splitting at the time of<a name="line.1021"></a>
+<span class="sourceLineNo">1022</span>              // the call to closeUserRegions.<a name="line.1022"></a>
+<span class="sourceLineNo">1023</span>              closeUserRegions(this.abortRequested);<a name="line.1023"></a>
+<span class="sourceLineNo">1024</span>            }<a name="line.1024"></a>
+<span class="sourceLineNo">1025</span>            LOG.debug("Waiting on " + getOnlineRegionsAsPrintableString());<a name="line.1025"></a>
+<span class="sourceLineNo">1026</span>          }<a name="line.1026"></a>
 <span class="sourceLineNo">1027</span>        }<a name="line.1027"></a>
-<span class="sourceLineNo">1028</span>        if (!isStopped() &amp;&amp; !isAborted()) {<a name="line.1028"></a>
-<span class="sourceLineNo">1029</span>          this.sleeper.sleep();<a name="line.1029"></a>
-<span class="sourceLineNo">1030</span>        }<a name="line.1030"></a>
-<span class="sourceLineNo">1031</span>      } // for<a name="line.1031"></a>
-<span class="sourceLineNo">1032</span>    } catch (Throwable t) {<a name="line.1032"></a>
-<span class="sourceLineNo">1033</span>      if (!rpcServices.checkOOME(t)) {<a name="line.1033"></a>
-<span class="sourceLineNo">1034</span>        String prefix = t instanceof YouAreDeadException? "": "Unhandled: ";<a name="line.1034"></a>
-<span class="sourceLineNo">1035</span>        abort(prefix + t.getMessage(), t);<a name="line.1035"></a>
-<span class="sourceLineNo">1036</span>      }<a name="line.1036"></a>
-<span class="sourceLineNo">1037</span>    }<a name="line.1037"></a>
-<span class="sourceLineNo">1038</span><a name="line.1038"></a>
-<span class="sourceLineNo">1039</span>    if (abortRequested) {<a name="line.1039"></a>
-<span class="sourceLineNo">1040</span>      Timer abortMonitor = new Timer("Abort regionserver monitor", true);<a name="line.1040"></a>
-<span class="sourceLineNo">1041</span>      TimerTask abortTimeoutTask = null;<a name="line.1041"></a>
-<span class="sourceLineNo">1042</span>      try {<a name="line.1042"></a>
-<span class="sourceLineNo">1043</span>        abortTimeoutTask =<a name="line.1043"></a>
-<span class="sourceLineNo">1044</span>            Class.forName(conf.get(ABORT_TIMEOUT_TASK, SystemExitWhenAbortTimeout.class.getName()))<a name="line.1044"></a>
-<span class="sourceLineNo">1045</span>                .asSubclass(TimerTask.class).getDeclaredConstructor().newInstance();<a name="line.1045"></a>
-<span class="sourceLineNo">1046</span>      } catch (Exception e) {<a name="line.1046"></a>
-<span class="sourceLineNo">1047</span>        LOG.warn("Initialize abort timeout task failed", e);<a name="line.1047"></a>
-<span class="sourceLineNo">1048</span>      }<a name="line.1048"></a>
-<span class="sourceLineNo">1049</span>      if (abortTimeoutTask != null) {<a name="line.1049"></a>
-<span class="sourceLineNo">1050</span>        abortMonitor.schedule(abortT

<TRUNCATED>

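The codec verification in checkCodecs() above can also be exercised standalone. Below is a minimal, hypothetical Java sketch (the class name and main method are illustrative and not part of this commit); it reads the same "hbase.regionserver.codecs" property and verifies each entry with CompressionTest.testCompression, mirroring the logic shown in the diff:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.util.CompressionTest;

// Hypothetical standalone check mirroring HRegionServer#checkCodecs in the patch above.
public class CodecCheckSketch {
  public static void main(String[] args) throws IOException {
    Configuration c = HBaseConfiguration.create();
    // Same property the region server consults at construction time.
    String[] codecs = c.getStrings("hbase.regionserver.codecs", (String[]) null);
    if (codecs == null) {
      return; // nothing configured, nothing to verify
    }
    for (String codec : codecs) {
      if (!CompressionTest.testCompression(codec)) {
        throw new IOException("Compression codec " + codec + " not supported");
      }
    }
    System.out.println("All configured codecs are available.");
  }
}

The region server performs the same check during construction so that an unsupported codec aborts startup immediately rather than surfacing later.
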
[04/26] hbase-site git commit: Published site at 6f15cecaed2f1f76bfe1880b7c578ed369daa5d5.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.html
index a21fcb7..84d6ae8 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.html
@@ -96,253 +96,254 @@
 <span class="sourceLineNo">088</span>      conf.setFloat("hbase.regionserver.global.memstore.size", 0.1f);<a name="line.88"></a>
 <span class="sourceLineNo">089</span>    }<a name="line.89"></a>
 <span class="sourceLineNo">090</span>    tableName = TableName.valueOf(name.getMethodName());<a name="line.90"></a>
-<span class="sourceLineNo">091</span>  }<a name="line.91"></a>
-<span class="sourceLineNo">092</span><a name="line.92"></a>
-<span class="sourceLineNo">093</span>  @After<a name="line.93"></a>
-<span class="sourceLineNo">094</span>  public void tearDown() throws Exception {<a name="line.94"></a>
-<span class="sourceLineNo">095</span>    EnvironmentEdgeManagerTestHelper.reset();<a name="line.95"></a>
-<span class="sourceLineNo">096</span>    LOG.info("Cleaning test directory: " + test_util.getDataTestDir());<a name="line.96"></a>
-<span class="sourceLineNo">097</span>    test_util.cleanupTestDir();<a name="line.97"></a>
-<span class="sourceLineNo">098</span>    CacheConfig.clearGlobalInstances();<a name="line.98"></a>
-<span class="sourceLineNo">099</span>  }<a name="line.99"></a>
-<span class="sourceLineNo">100</span><a name="line.100"></a>
-<span class="sourceLineNo">101</span>  String getName() {<a name="line.101"></a>
-<span class="sourceLineNo">102</span>    return name.getMethodName();<a name="line.102"></a>
-<span class="sourceLineNo">103</span>  }<a name="line.103"></a>
-<span class="sourceLineNo">104</span><a name="line.104"></a>
-<span class="sourceLineNo">105</span>  @Test<a name="line.105"></a>
-<span class="sourceLineNo">106</span>  public void testBasicScanWithLRUCache() throws IOException {<a name="line.106"></a>
-<span class="sourceLineNo">107</span>    setUp(false);<a name="line.107"></a>
-<span class="sourceLineNo">108</span>    byte[] row1 = Bytes.toBytes("row1");<a name="line.108"></a>
-<span class="sourceLineNo">109</span>    byte[] qf1 = Bytes.toBytes("qualifier1");<a name="line.109"></a>
-<span class="sourceLineNo">110</span>    byte[] qf2 = Bytes.toBytes("qualifier2");<a name="line.110"></a>
-<span class="sourceLineNo">111</span>    byte[] fam1 = Bytes.toBytes("lrucache");<a name="line.111"></a>
-<span class="sourceLineNo">112</span><a name="line.112"></a>
-<span class="sourceLineNo">113</span>    long ts1 = 1; // System.currentTimeMillis();<a name="line.113"></a>
-<span class="sourceLineNo">114</span>    long ts2 = ts1 + 1;<a name="line.114"></a>
-<span class="sourceLineNo">115</span>    long ts3 = ts1 + 2;<a name="line.115"></a>
-<span class="sourceLineNo">116</span><a name="line.116"></a>
-<span class="sourceLineNo">117</span>    // Setting up region<a name="line.117"></a>
-<span class="sourceLineNo">118</span>    String method = this.getName();<a name="line.118"></a>
-<span class="sourceLineNo">119</span>    this.region = initHRegion(tableName, method, conf, test_util, fam1);<a name="line.119"></a>
-<span class="sourceLineNo">120</span>    try {<a name="line.120"></a>
-<span class="sourceLineNo">121</span>      List&lt;Cell&gt; expected = insertData(row1, qf1, qf2, fam1, ts1, ts2, ts3, false);<a name="line.121"></a>
-<span class="sourceLineNo">122</span><a name="line.122"></a>
-<span class="sourceLineNo">123</span>      List&lt;Cell&gt; actual = performScan(row1, fam1);<a name="line.123"></a>
-<span class="sourceLineNo">124</span>      // Verify result<a name="line.124"></a>
-<span class="sourceLineNo">125</span>      for (int i = 0; i &lt; expected.size(); i++) {<a name="line.125"></a>
-<span class="sourceLineNo">126</span>        assertFalse(actual.get(i) instanceof ByteBufferKeyValue);<a name="line.126"></a>
-<span class="sourceLineNo">127</span>        assertTrue(PrivateCellUtil.equalsIgnoreMvccVersion(expected.get(i), actual.get(i)));<a name="line.127"></a>
-<span class="sourceLineNo">128</span>      }<a name="line.128"></a>
-<span class="sourceLineNo">129</span>      // do the scan again and verify. This time it should be from the lru cache<a name="line.129"></a>
-<span class="sourceLineNo">130</span>      actual = performScan(row1, fam1);<a name="line.130"></a>
-<span class="sourceLineNo">131</span>      // Verify result<a name="line.131"></a>
-<span class="sourceLineNo">132</span>      for (int i = 0; i &lt; expected.size(); i++) {<a name="line.132"></a>
-<span class="sourceLineNo">133</span>        assertFalse(actual.get(i) instanceof ByteBufferKeyValue);<a name="line.133"></a>
-<span class="sourceLineNo">134</span>        assertTrue(PrivateCellUtil.equalsIgnoreMvccVersion(expected.get(i), actual.get(i)));<a name="line.134"></a>
-<span class="sourceLineNo">135</span>      }<a name="line.135"></a>
-<span class="sourceLineNo">136</span><a name="line.136"></a>
-<span class="sourceLineNo">137</span>    } finally {<a name="line.137"></a>
-<span class="sourceLineNo">138</span>      HBaseTestingUtility.closeRegionAndWAL(this.region);<a name="line.138"></a>
-<span class="sourceLineNo">139</span>      this.region = null;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>    }<a name="line.140"></a>
-<span class="sourceLineNo">141</span>  }<a name="line.141"></a>
-<span class="sourceLineNo">142</span><a name="line.142"></a>
-<span class="sourceLineNo">143</span>  @Test<a name="line.143"></a>
-<span class="sourceLineNo">144</span>  public void testBasicScanWithOffheapBucketCache() throws IOException {<a name="line.144"></a>
-<span class="sourceLineNo">145</span>    setUp(true);<a name="line.145"></a>
-<span class="sourceLineNo">146</span>    byte[] row1 = Bytes.toBytes("row1offheap");<a name="line.146"></a>
-<span class="sourceLineNo">147</span>    byte[] qf1 = Bytes.toBytes("qualifier1");<a name="line.147"></a>
-<span class="sourceLineNo">148</span>    byte[] qf2 = Bytes.toBytes("qualifier2");<a name="line.148"></a>
-<span class="sourceLineNo">149</span>    byte[] fam1 = Bytes.toBytes("famoffheap");<a name="line.149"></a>
-<span class="sourceLineNo">150</span><a name="line.150"></a>
-<span class="sourceLineNo">151</span>    long ts1 = 1; // System.currentTimeMillis();<a name="line.151"></a>
-<span class="sourceLineNo">152</span>    long ts2 = ts1 + 1;<a name="line.152"></a>
-<span class="sourceLineNo">153</span>    long ts3 = ts1 + 2;<a name="line.153"></a>
-<span class="sourceLineNo">154</span><a name="line.154"></a>
-<span class="sourceLineNo">155</span>    // Setting up region<a name="line.155"></a>
-<span class="sourceLineNo">156</span>    String method = this.getName();<a name="line.156"></a>
-<span class="sourceLineNo">157</span>    this.region = initHRegion(tableName, method, conf, test_util, fam1);<a name="line.157"></a>
-<span class="sourceLineNo">158</span>    try {<a name="line.158"></a>
-<span class="sourceLineNo">159</span>      List&lt;Cell&gt; expected = insertData(row1, qf1, qf2, fam1, ts1, ts2, ts3, false);<a name="line.159"></a>
-<span class="sourceLineNo">160</span><a name="line.160"></a>
-<span class="sourceLineNo">161</span>      List&lt;Cell&gt; actual = performScan(row1, fam1);<a name="line.161"></a>
-<span class="sourceLineNo">162</span>      // Verify result<a name="line.162"></a>
-<span class="sourceLineNo">163</span>      for (int i = 0; i &lt; expected.size(); i++) {<a name="line.163"></a>
-<span class="sourceLineNo">164</span>        assertFalse(actual.get(i) instanceof ByteBufferKeyValue);<a name="line.164"></a>
-<span class="sourceLineNo">165</span>        assertTrue(PrivateCellUtil.equalsIgnoreMvccVersion(expected.get(i), actual.get(i)));<a name="line.165"></a>
-<span class="sourceLineNo">166</span>      }<a name="line.166"></a>
-<span class="sourceLineNo">167</span>      // Wait for the bucket cache threads to move the data to offheap<a name="line.167"></a>
-<span class="sourceLineNo">168</span>      Thread.sleep(500);<a name="line.168"></a>
-<span class="sourceLineNo">169</span>      // do the scan again and verify. This time it should be from the bucket cache in offheap mode<a name="line.169"></a>
-<span class="sourceLineNo">170</span>      actual = performScan(row1, fam1);<a name="line.170"></a>
-<span class="sourceLineNo">171</span>      // Verify result<a name="line.171"></a>
-<span class="sourceLineNo">172</span>      for (int i = 0; i &lt; expected.size(); i++) {<a name="line.172"></a>
-<span class="sourceLineNo">173</span>        assertTrue(actual.get(i) instanceof ByteBufferKeyValue);<a name="line.173"></a>
-<span class="sourceLineNo">174</span>        assertTrue(PrivateCellUtil.equalsIgnoreMvccVersion(expected.get(i), actual.get(i)));<a name="line.174"></a>
-<span class="sourceLineNo">175</span>      }<a name="line.175"></a>
-<span class="sourceLineNo">176</span><a name="line.176"></a>
-<span class="sourceLineNo">177</span>    } catch (InterruptedException e) {<a name="line.177"></a>
-<span class="sourceLineNo">178</span>    } finally {<a name="line.178"></a>
-<span class="sourceLineNo">179</span>      HBaseTestingUtility.closeRegionAndWAL(this.region);<a name="line.179"></a>
-<span class="sourceLineNo">180</span>      this.region = null;<a name="line.180"></a>
-<span class="sourceLineNo">181</span>    }<a name="line.181"></a>
-<span class="sourceLineNo">182</span>  }<a name="line.182"></a>
-<span class="sourceLineNo">183</span><a name="line.183"></a>
-<span class="sourceLineNo">184</span>  @Test<a name="line.184"></a>
-<span class="sourceLineNo">185</span>  public void testBasicScanWithOffheapBucketCacheWithMBB() throws IOException {<a name="line.185"></a>
-<span class="sourceLineNo">186</span>    setUp(true);<a name="line.186"></a>
-<span class="sourceLineNo">187</span>    byte[] row1 = Bytes.toBytes("row1offheap");<a name="line.187"></a>
-<span class="sourceLineNo">188</span>    byte[] qf1 = Bytes.toBytes("qualifier1");<a name="line.188"></a>
-<span class="sourceLineNo">189</span>    byte[] qf2 = Bytes.toBytes("qualifier2");<a name="line.189"></a>
-<span class="sourceLineNo">190</span>    byte[] fam1 = Bytes.toBytes("famoffheap");<a name="line.190"></a>
-<span class="sourceLineNo">191</span><a name="line.191"></a>
-<span class="sourceLineNo">192</span>    long ts1 = 1; // System.currentTimeMillis();<a name="line.192"></a>
-<span class="sourceLineNo">193</span>    long ts2 = ts1 + 1;<a name="line.193"></a>
-<span class="sourceLineNo">194</span>    long ts3 = ts1 + 2;<a name="line.194"></a>
-<span class="sourceLineNo">195</span><a name="line.195"></a>
-<span class="sourceLineNo">196</span>    // Setting up region<a name="line.196"></a>
-<span class="sourceLineNo">197</span>    String method = this.getName();<a name="line.197"></a>
-<span class="sourceLineNo">198</span>    this.region = initHRegion(tableName, method, conf, test_util, fam1);<a name="line.198"></a>
-<span class="sourceLineNo">199</span>    try {<a name="line.199"></a>
-<span class="sourceLineNo">200</span>      List&lt;Cell&gt; expected = insertData(row1, qf1, qf2, fam1, ts1, ts2, ts3, true);<a name="line.200"></a>
-<span class="sourceLineNo">201</span><a name="line.201"></a>
-<span class="sourceLineNo">202</span>      List&lt;Cell&gt; actual = performScan(row1, fam1);<a name="line.202"></a>
-<span class="sourceLineNo">203</span>      // Verify result<a name="line.203"></a>
-<span class="sourceLineNo">204</span>      for (int i = 0; i &lt; expected.size(); i++) {<a name="line.204"></a>
-<span class="sourceLineNo">205</span>        assertFalse(actual.get(i) instanceof ByteBufferKeyValue);<a name="line.205"></a>
-<span class="sourceLineNo">206</span>        assertTrue(PrivateCellUtil.equalsIgnoreMvccVersion(expected.get(i), actual.get(i)));<a name="line.206"></a>
-<span class="sourceLineNo">207</span>      }<a name="line.207"></a>
-<span class="sourceLineNo">208</span>      // Wait for the bucket cache threads to move the data to offheap<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      Thread.sleep(500);<a name="line.209"></a>
-<span class="sourceLineNo">210</span>      // do the scan again and verify. This time it should be from the bucket cache in offheap mode<a name="line.210"></a>
-<span class="sourceLineNo">211</span>      // but one of the cell will be copied due to the asSubByteBuff call<a name="line.211"></a>
-<span class="sourceLineNo">212</span>      Scan scan = new Scan(row1);<a name="line.212"></a>
-<span class="sourceLineNo">213</span>      scan.addFamily(fam1);<a name="line.213"></a>
-<span class="sourceLineNo">214</span>      scan.setMaxVersions(10);<a name="line.214"></a>
-<span class="sourceLineNo">215</span>      actual = new ArrayList&lt;&gt;();<a name="line.215"></a>
-<span class="sourceLineNo">216</span>      InternalScanner scanner = region.getScanner(scan);<a name="line.216"></a>
-<span class="sourceLineNo">217</span><a name="line.217"></a>
-<span class="sourceLineNo">218</span>      boolean hasNext = scanner.next(actual);<a name="line.218"></a>
-<span class="sourceLineNo">219</span>      assertEquals(false, hasNext);<a name="line.219"></a>
-<span class="sourceLineNo">220</span>      // Verify result<a name="line.220"></a>
-<span class="sourceLineNo">221</span>      for (int i = 0; i &lt; expected.size(); i++) {<a name="line.221"></a>
-<span class="sourceLineNo">222</span>        if (i != 5) {<a name="line.222"></a>
-<span class="sourceLineNo">223</span>          // the last cell fetched will be of type shareable but not offheap because<a name="line.223"></a>
-<span class="sourceLineNo">224</span>          // the MBB is copied to form a single cell<a name="line.224"></a>
-<span class="sourceLineNo">225</span>          assertTrue(actual.get(i) instanceof ByteBufferKeyValue);<a name="line.225"></a>
-<span class="sourceLineNo">226</span>        }<a name="line.226"></a>
-<span class="sourceLineNo">227</span>      }<a name="line.227"></a>
-<span class="sourceLineNo">228</span><a name="line.228"></a>
-<span class="sourceLineNo">229</span>    } catch (InterruptedException e) {<a name="line.229"></a>
-<span class="sourceLineNo">230</span>    } finally {<a name="line.230"></a>
-<span class="sourceLineNo">231</span>      HBaseTestingUtility.closeRegionAndWAL(this.region);<a name="line.231"></a>
-<span class="sourceLineNo">232</span>      this.region = null;<a name="line.232"></a>
-<span class="sourceLineNo">233</span>    }<a name="line.233"></a>
-<span class="sourceLineNo">234</span>  }<a name="line.234"></a>
-<span class="sourceLineNo">235</span><a name="line.235"></a>
-<span class="sourceLineNo">236</span>  private List&lt;Cell&gt; insertData(byte[] row1, byte[] qf1, byte[] qf2, byte[] fam1, long ts1,<a name="line.236"></a>
-<span class="sourceLineNo">237</span>      long ts2, long ts3, boolean withVal) throws IOException {<a name="line.237"></a>
-<span class="sourceLineNo">238</span>    // Putting data in Region<a name="line.238"></a>
-<span class="sourceLineNo">239</span>    Put put = null;<a name="line.239"></a>
-<span class="sourceLineNo">240</span>    KeyValue kv13 = null;<a name="line.240"></a>
-<span class="sourceLineNo">241</span>    KeyValue kv12 = null;<a name="line.241"></a>
-<span class="sourceLineNo">242</span>    KeyValue kv11 = null;<a name="line.242"></a>
-<span class="sourceLineNo">243</span><a name="line.243"></a>
-<span class="sourceLineNo">244</span>    KeyValue kv23 = null;<a name="line.244"></a>
-<span class="sourceLineNo">245</span>    KeyValue kv22 = null;<a name="line.245"></a>
-<span class="sourceLineNo">246</span>    KeyValue kv21 = null;<a name="line.246"></a>
-<span class="sourceLineNo">247</span>    if (!withVal) {<a name="line.247"></a>
-<span class="sourceLineNo">248</span>      kv13 = new KeyValue(row1, fam1, qf1, ts3, KeyValue.Type.Put, null);<a name="line.248"></a>
-<span class="sourceLineNo">249</span>      kv12 = new KeyValue(row1, fam1, qf1, ts2, KeyValue.Type.Put, null);<a name="line.249"></a>
-<span class="sourceLineNo">250</span>      kv11 = new KeyValue(row1, fam1, qf1, ts1, KeyValue.Type.Put, null);<a name="line.250"></a>
-<span class="sourceLineNo">251</span><a name="line.251"></a>
-<span class="sourceLineNo">252</span>      kv23 = new KeyValue(row1, fam1, qf2, ts3, KeyValue.Type.Put, null);<a name="line.252"></a>
-<span class="sourceLineNo">253</span>      kv22 = new KeyValue(row1, fam1, qf2, ts2, KeyValue.Type.Put, null);<a name="line.253"></a>
-<span class="sourceLineNo">254</span>      kv21 = new KeyValue(row1, fam1, qf2, ts1, KeyValue.Type.Put, null);<a name="line.254"></a>
-<span class="sourceLineNo">255</span>    } else {<a name="line.255"></a>
-<span class="sourceLineNo">256</span>      kv13 = new KeyValue(row1, fam1, qf1, ts3, KeyValue.Type.Put, val);<a name="line.256"></a>
-<span class="sourceLineNo">257</span>      kv12 = new KeyValue(row1, fam1, qf1, ts2, KeyValue.Type.Put, val);<a name="line.257"></a>
-<span class="sourceLineNo">258</span>      kv11 = new KeyValue(row1, fam1, qf1, ts1, KeyValue.Type.Put, val);<a name="line.258"></a>
-<span class="sourceLineNo">259</span><a name="line.259"></a>
-<span class="sourceLineNo">260</span>      kv23 = new KeyValue(row1, fam1, qf2, ts3, KeyValue.Type.Put, val);<a name="line.260"></a>
-<span class="sourceLineNo">261</span>      kv22 = new KeyValue(row1, fam1, qf2, ts2, KeyValue.Type.Put, val);<a name="line.261"></a>
-<span class="sourceLineNo">262</span>      kv21 = new KeyValue(row1, fam1, qf2, ts1, KeyValue.Type.Put, val);<a name="line.262"></a>
-<span class="sourceLineNo">263</span>    }<a name="line.263"></a>
-<span class="sourceLineNo">264</span><a name="line.264"></a>
-<span class="sourceLineNo">265</span>    put = new Put(row1);<a name="line.265"></a>
-<span class="sourceLineNo">266</span>    put.add(kv13);<a name="line.266"></a>
-<span class="sourceLineNo">267</span>    put.add(kv12);<a name="line.267"></a>
-<span class="sourceLineNo">268</span>    put.add(kv11);<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    put.add(kv23);<a name="line.269"></a>
-<span class="sourceLineNo">270</span>    put.add(kv22);<a name="line.270"></a>
-<span class="sourceLineNo">271</span>    put.add(kv21);<a name="line.271"></a>
-<span class="sourceLineNo">272</span>    region.put(put);<a name="line.272"></a>
-<span class="sourceLineNo">273</span>    region.flush(true);<a name="line.273"></a>
-<span class="sourceLineNo">274</span>    HStore store = region.getStore(fam1);<a name="line.274"></a>
-<span class="sourceLineNo">275</span>    while (store.getStorefilesCount() &lt;= 0) {<a name="line.275"></a>
-<span class="sourceLineNo">276</span>      try {<a name="line.276"></a>
-<span class="sourceLineNo">277</span>        Thread.sleep(20);<a name="line.277"></a>
-<span class="sourceLineNo">278</span>      } catch (InterruptedException e) {<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      }<a name="line.279"></a>
-<span class="sourceLineNo">280</span>    }<a name="line.280"></a>
-<span class="sourceLineNo">281</span><a name="line.281"></a>
-<span class="sourceLineNo">282</span>    // Expected<a name="line.282"></a>
-<span class="sourceLineNo">283</span>    List&lt;Cell&gt; expected = new ArrayList&lt;&gt;();<a name="line.283"></a>
-<span class="sourceLineNo">284</span>    expected.add(kv13);<a name="line.284"></a>
-<span class="sourceLineNo">285</span>    expected.add(kv12);<a name="line.285"></a>
-<span class="sourceLineNo">286</span>    expected.add(kv23);<a name="line.286"></a>
-<span class="sourceLineNo">287</span>    expected.add(kv22);<a name="line.287"></a>
-<span class="sourceLineNo">288</span>    return expected;<a name="line.288"></a>
-<span class="sourceLineNo">289</span>  }<a name="line.289"></a>
-<span class="sourceLineNo">290</span><a name="line.290"></a>
-<span class="sourceLineNo">291</span>  private List&lt;Cell&gt; performScan(byte[] row1, byte[] fam1) throws IOException {<a name="line.291"></a>
-<span class="sourceLineNo">292</span>    Scan scan = new Scan(row1);<a name="line.292"></a>
-<span class="sourceLineNo">293</span>    scan.addFamily(fam1);<a name="line.293"></a>
-<span class="sourceLineNo">294</span>    scan.setMaxVersions(MAX_VERSIONS);<a name="line.294"></a>
-<span class="sourceLineNo">295</span>    List&lt;Cell&gt; actual = new ArrayList&lt;&gt;();<a name="line.295"></a>
-<span class="sourceLineNo">296</span>    InternalScanner scanner = region.getScanner(scan);<a name="line.296"></a>
-<span class="sourceLineNo">297</span><a name="line.297"></a>
-<span class="sourceLineNo">298</span>    boolean hasNext = scanner.next(actual);<a name="line.298"></a>
-<span class="sourceLineNo">299</span>    assertEquals(false, hasNext);<a name="line.299"></a>
-<span class="sourceLineNo">300</span>    return actual;<a name="line.300"></a>
-<span class="sourceLineNo">301</span>  }<a name="line.301"></a>
-<span class="sourceLineNo">302</span><a name="line.302"></a>
-<span class="sourceLineNo">303</span>  private static HRegion initHRegion(TableName tableName, String callingMethod, Configuration conf,<a name="line.303"></a>
-<span class="sourceLineNo">304</span>      HBaseTestingUtility test_util, byte[]... families) throws IOException {<a name="line.304"></a>
-<span class="sourceLineNo">305</span>    return initHRegion(tableName, null, null, callingMethod, conf, test_util, false, families);<a name="line.305"></a>
-<span class="sourceLineNo">306</span>  }<a name="line.306"></a>
-<span class="sourceLineNo">307</span><a name="line.307"></a>
-<span class="sourceLineNo">308</span>  private static HRegion initHRegion(TableName tableName, byte[] startKey, byte[] stopKey,<a name="line.308"></a>
-<span class="sourceLineNo">309</span>      String callingMethod, Configuration conf, HBaseTestingUtility test_util, boolean isReadOnly,<a name="line.309"></a>
-<span class="sourceLineNo">310</span>      byte[]... families) throws IOException {<a name="line.310"></a>
-<span class="sourceLineNo">311</span>    ChunkCreator.initialize(MemStoreLABImpl.CHUNK_SIZE_DEFAULT, false, 0, 0, 0, null);<a name="line.311"></a>
-<span class="sourceLineNo">312</span>    Path logDir = test_util.getDataTestDirOnTestFS(callingMethod + ".log");<a name="line.312"></a>
-<span class="sourceLineNo">313</span>    HRegionInfo hri = new HRegionInfo(tableName, startKey, stopKey);<a name="line.313"></a>
-<span class="sourceLineNo">314</span>    final WAL wal = HBaseTestingUtility.createWal(conf, logDir, hri);<a name="line.314"></a>
-<span class="sourceLineNo">315</span>    return initHRegion(tableName, startKey, stopKey, callingMethod, conf, test_util, isReadOnly,<a name="line.315"></a>
-<span class="sourceLineNo">316</span>      Durability.SYNC_WAL, wal, families);<a name="line.316"></a>
-<span class="sourceLineNo">317</span>  }<a name="line.317"></a>
-<span class="sourceLineNo">318</span><a name="line.318"></a>
-<span class="sourceLineNo">319</span>  /**<a name="line.319"></a>
-<span class="sourceLineNo">320</span>   * @param tableName<a name="line.320"></a>
-<span class="sourceLineNo">321</span>   * @param startKey<a name="line.321"></a>
-<span class="sourceLineNo">322</span>   * @param stopKey<a name="line.322"></a>
-<span class="sourceLineNo">323</span>   * @param callingMethod<a name="line.323"></a>
-<span class="sourceLineNo">324</span>   * @param conf<a name="line.324"></a>
-<span class="sourceLineNo">325</span>   * @param isReadOnly<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   * @param families<a name="line.326"></a>
-<span class="sourceLineNo">327</span>   * @throws IOException<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   * @return A region on which you must call {@link HBaseTestingUtility#closeRegionAndWAL(HRegion)}<a name="line.328"></a>
-<span class="sourceLineNo">329</span>   *         when done.<a name="line.329"></a>
-<span class="sourceLineNo">330</span>   */<a name="line.330"></a>
-<span class="sourceLineNo">331</span>  private static HRegion initHRegion(TableName tableName, byte[] startKey, byte[] stopKey,<a name="line.331"></a>
-<span class="sourceLineNo">332</span>      String callingMethod, Configuration conf, HBaseTestingUtility test_util, boolean isReadOnly,<a name="line.332"></a>
-<span class="sourceLineNo">333</span>      Durability durability, WAL wal, byte[]... families) throws IOException {<a name="line.333"></a>
-<span class="sourceLineNo">334</span>    return test_util.createLocalHRegion(tableName, startKey, stopKey, isReadOnly, durability, wal,<a name="line.334"></a>
-<span class="sourceLineNo">335</span>      families);<a name="line.335"></a>
-<span class="sourceLineNo">336</span>  }<a name="line.336"></a>
-<span class="sourceLineNo">337</span>}<a name="line.337"></a>
+<span class="sourceLineNo">091</span>    CacheConfig.instantiateBlockCache(conf);<a name="line.91"></a>
+<span class="sourceLineNo">092</span>  }<a name="line.92"></a>
+<span class="sourceLineNo">093</span><a name="line.93"></a>
+<span class="sourceLineNo">094</span>  @After<a name="line.94"></a>
+<span class="sourceLineNo">095</span>  public void tearDown() throws Exception {<a name="line.95"></a>
+<span class="sourceLineNo">096</span>    EnvironmentEdgeManagerTestHelper.reset();<a name="line.96"></a>
+<span class="sourceLineNo">097</span>    LOG.info("Cleaning test directory: " + test_util.getDataTestDir());<a name="line.97"></a>
+<span class="sourceLineNo">098</span>    test_util.cleanupTestDir();<a name="line.98"></a>
+<span class="sourceLineNo">099</span>    CacheConfig.clearGlobalInstances();<a name="line.99"></a>
+<span class="sourceLineNo">100</span>  }<a name="line.100"></a>
+<span class="sourceLineNo">101</span><a name="line.101"></a>
+<span class="sourceLineNo">102</span>  String getName() {<a name="line.102"></a>
+<span class="sourceLineNo">103</span>    return name.getMethodName();<a name="line.103"></a>
+<span class="sourceLineNo">104</span>  }<a name="line.104"></a>
+<span class="sourceLineNo">105</span><a name="line.105"></a>
+<span class="sourceLineNo">106</span>  @Test<a name="line.106"></a>
+<span class="sourceLineNo">107</span>  public void testBasicScanWithLRUCache() throws IOException {<a name="line.107"></a>
+<span class="sourceLineNo">108</span>    setUp(false);<a name="line.108"></a>
+<span class="sourceLineNo">109</span>    byte[] row1 = Bytes.toBytes("row1");<a name="line.109"></a>
+<span class="sourceLineNo">110</span>    byte[] qf1 = Bytes.toBytes("qualifier1");<a name="line.110"></a>
+<span class="sourceLineNo">111</span>    byte[] qf2 = Bytes.toBytes("qualifier2");<a name="line.111"></a>
+<span class="sourceLineNo">112</span>    byte[] fam1 = Bytes.toBytes("lrucache");<a name="line.112"></a>
+<span class="sourceLineNo">113</span><a name="line.113"></a>
+<span class="sourceLineNo">114</span>    long ts1 = 1; // System.currentTimeMillis();<a name="line.114"></a>
+<span class="sourceLineNo">115</span>    long ts2 = ts1 + 1;<a name="line.115"></a>
+<span class="sourceLineNo">116</span>    long ts3 = ts1 + 2;<a name="line.116"></a>
+<span class="sourceLineNo">117</span><a name="line.117"></a>
+<span class="sourceLineNo">118</span>    // Setting up region<a name="line.118"></a>
+<span class="sourceLineNo">119</span>    String method = this.getName();<a name="line.119"></a>
+<span class="sourceLineNo">120</span>    this.region = initHRegion(tableName, method, conf, test_util, fam1);<a name="line.120"></a>
+<span class="sourceLineNo">121</span>    try {<a name="line.121"></a>
+<span class="sourceLineNo">122</span>      List&lt;Cell&gt; expected = insertData(row1, qf1, qf2, fam1, ts1, ts2, ts3, false);<a name="line.122"></a>
+<span class="sourceLineNo">123</span><a name="line.123"></a>
+<span class="sourceLineNo">124</span>      List&lt;Cell&gt; actual = performScan(row1, fam1);<a name="line.124"></a>
+<span class="sourceLineNo">125</span>      // Verify result<a name="line.125"></a>
+<span class="sourceLineNo">126</span>      for (int i = 0; i &lt; expected.size(); i++) {<a name="line.126"></a>
+<span class="sourceLineNo">127</span>        assertFalse(actual.get(i) instanceof ByteBufferKeyValue);<a name="line.127"></a>
+<span class="sourceLineNo">128</span>        assertTrue(PrivateCellUtil.equalsIgnoreMvccVersion(expected.get(i), actual.get(i)));<a name="line.128"></a>
+<span class="sourceLineNo">129</span>      }<a name="line.129"></a>
+<span class="sourceLineNo">130</span>      // do the scan again and verify. This time it should be from the lru cache<a name="line.130"></a>
+<span class="sourceLineNo">131</span>      actual = performScan(row1, fam1);<a name="line.131"></a>
+<span class="sourceLineNo">132</span>      // Verify result<a name="line.132"></a>
+<span class="sourceLineNo">133</span>      for (int i = 0; i &lt; expected.size(); i++) {<a name="line.133"></a>
+<span class="sourceLineNo">134</span>        assertFalse(actual.get(i) instanceof ByteBufferKeyValue);<a name="line.134"></a>
+<span class="sourceLineNo">135</span>        assertTrue(PrivateCellUtil.equalsIgnoreMvccVersion(expected.get(i), actual.get(i)));<a name="line.135"></a>
+<span class="sourceLineNo">136</span>      }<a name="line.136"></a>
+<span class="sourceLineNo">137</span><a name="line.137"></a>
+<span class="sourceLineNo">138</span>    } finally {<a name="line.138"></a>
+<span class="sourceLineNo">139</span>      HBaseTestingUtility.closeRegionAndWAL(this.region);<a name="line.139"></a>
+<span class="sourceLineNo">140</span>      this.region = null;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>    }<a name="line.141"></a>
+<span class="sourceLineNo">142</span>  }<a name="line.142"></a>
+<span class="sourceLineNo">143</span><a name="line.143"></a>
+<span class="sourceLineNo">144</span>  @Test<a name="line.144"></a>
+<span class="sourceLineNo">145</span>  public void testBasicScanWithOffheapBucketCache() throws IOException {<a name="line.145"></a>
+<span class="sourceLineNo">146</span>    setUp(true);<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    byte[] row1 = Bytes.toBytes("row1offheap");<a name="line.147"></a>
+<span class="sourceLineNo">148</span>    byte[] qf1 = Bytes.toBytes("qualifier1");<a name="line.148"></a>
+<span class="sourceLineNo">149</span>    byte[] qf2 = Bytes.toBytes("qualifier2");<a name="line.149"></a>
+<span class="sourceLineNo">150</span>    byte[] fam1 = Bytes.toBytes("famoffheap");<a name="line.150"></a>
+<span class="sourceLineNo">151</span><a name="line.151"></a>
+<span class="sourceLineNo">152</span>    long ts1 = 1; // System.currentTimeMillis();<a name="line.152"></a>
+<span class="sourceLineNo">153</span>    long ts2 = ts1 + 1;<a name="line.153"></a>
+<span class="sourceLineNo">154</span>    long ts3 = ts1 + 2;<a name="line.154"></a>
+<span class="sourceLineNo">155</span><a name="line.155"></a>
+<span class="sourceLineNo">156</span>    // Setting up region<a name="line.156"></a>
+<span class="sourceLineNo">157</span>    String method = this.getName();<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    this.region = initHRegion(tableName, method, conf, test_util, fam1);<a name="line.158"></a>
+<span class="sourceLineNo">159</span>    try {<a name="line.159"></a>
+<span class="sourceLineNo">160</span>      List&lt;Cell&gt; expected = insertData(row1, qf1, qf2, fam1, ts1, ts2, ts3, false);<a name="line.160"></a>
+<span class="sourceLineNo">161</span><a name="line.161"></a>
+<span class="sourceLineNo">162</span>      List&lt;Cell&gt; actual = performScan(row1, fam1);<a name="line.162"></a>
+<span class="sourceLineNo">163</span>      // Verify result<a name="line.163"></a>
+<span class="sourceLineNo">164</span>      for (int i = 0; i &lt; expected.size(); i++) {<a name="line.164"></a>
+<span class="sourceLineNo">165</span>        assertFalse(actual.get(i) instanceof ByteBufferKeyValue);<a name="line.165"></a>
+<span class="sourceLineNo">166</span>        assertTrue(PrivateCellUtil.equalsIgnoreMvccVersion(expected.get(i), actual.get(i)));<a name="line.166"></a>
+<span class="sourceLineNo">167</span>      }<a name="line.167"></a>
+<span class="sourceLineNo">168</span>      // Wait for the bucket cache threads to move the data to offheap<a name="line.168"></a>
+<span class="sourceLineNo">169</span>      Thread.sleep(500);<a name="line.169"></a>
+<span class="sourceLineNo">170</span>      // do the scan again and verify. This time it should be from the bucket cache in offheap mode<a name="line.170"></a>
+<span class="sourceLineNo">171</span>      actual = performScan(row1, fam1);<a name="line.171"></a>
+<span class="sourceLineNo">172</span>      // Verify result<a name="line.172"></a>
+<span class="sourceLineNo">173</span>      for (int i = 0; i &lt; expected.size(); i++) {<a name="line.173"></a>
+<span class="sourceLineNo">174</span>        assertTrue(actual.get(i) instanceof ByteBufferKeyValue);<a name="line.174"></a>
+<span class="sourceLineNo">175</span>        assertTrue(PrivateCellUtil.equalsIgnoreMvccVersion(expected.get(i), actual.get(i)));<a name="line.175"></a>
+<span class="sourceLineNo">176</span>      }<a name="line.176"></a>
+<span class="sourceLineNo">177</span><a name="line.177"></a>
+<span class="sourceLineNo">178</span>    } catch (InterruptedException e) {<a name="line.178"></a>
+<span class="sourceLineNo">179</span>    } finally {<a name="line.179"></a>
+<span class="sourceLineNo">180</span>      HBaseTestingUtility.closeRegionAndWAL(this.region);<a name="line.180"></a>
+<span class="sourceLineNo">181</span>      this.region = null;<a name="line.181"></a>
+<span class="sourceLineNo">182</span>    }<a name="line.182"></a>
+<span class="sourceLineNo">183</span>  }<a name="line.183"></a>
+<span class="sourceLineNo">184</span><a name="line.184"></a>
+<span class="sourceLineNo">185</span>  @Test<a name="line.185"></a>
+<span class="sourceLineNo">186</span>  public void testBasicScanWithOffheapBucketCacheWithMBB() throws IOException {<a name="line.186"></a>
+<span class="sourceLineNo">187</span>    setUp(true);<a name="line.187"></a>
+<span class="sourceLineNo">188</span>    byte[] row1 = Bytes.toBytes("row1offheap");<a name="line.188"></a>
+<span class="sourceLineNo">189</span>    byte[] qf1 = Bytes.toBytes("qualifier1");<a name="line.189"></a>
+<span class="sourceLineNo">190</span>    byte[] qf2 = Bytes.toBytes("qualifier2");<a name="line.190"></a>
+<span class="sourceLineNo">191</span>    byte[] fam1 = Bytes.toBytes("famoffheap");<a name="line.191"></a>
+<span class="sourceLineNo">192</span><a name="line.192"></a>
+<span class="sourceLineNo">193</span>    long ts1 = 1; // System.currentTimeMillis();<a name="line.193"></a>
+<span class="sourceLineNo">194</span>    long ts2 = ts1 + 1;<a name="line.194"></a>
+<span class="sourceLineNo">195</span>    long ts3 = ts1 + 2;<a name="line.195"></a>
+<span class="sourceLineNo">196</span><a name="line.196"></a>
+<span class="sourceLineNo">197</span>    // Setting up region<a name="line.197"></a>
+<span class="sourceLineNo">198</span>    String method = this.getName();<a name="line.198"></a>
+<span class="sourceLineNo">199</span>    this.region = initHRegion(tableName, method, conf, test_util, fam1);<a name="line.199"></a>
+<span class="sourceLineNo">200</span>    try {<a name="line.200"></a>
+<span class="sourceLineNo">201</span>      List&lt;Cell&gt; expected = insertData(row1, qf1, qf2, fam1, ts1, ts2, ts3, true);<a name="line.201"></a>
+<span class="sourceLineNo">202</span><a name="line.202"></a>
+<span class="sourceLineNo">203</span>      List&lt;Cell&gt; actual = performScan(row1, fam1);<a name="line.203"></a>
+<span class="sourceLineNo">204</span>      // Verify result<a name="line.204"></a>
+<span class="sourceLineNo">205</span>      for (int i = 0; i &lt; expected.size(); i++) {<a name="line.205"></a>
+<span class="sourceLineNo">206</span>        assertFalse(actual.get(i) instanceof ByteBufferKeyValue);<a name="line.206"></a>
+<span class="sourceLineNo">207</span>        assertTrue(PrivateCellUtil.equalsIgnoreMvccVersion(expected.get(i), actual.get(i)));<a name="line.207"></a>
+<span class="sourceLineNo">208</span>      }<a name="line.208"></a>
+<span class="sourceLineNo">209</span>      // Wait for the bucket cache threads to move the data to offheap<a name="line.209"></a>
+<span class="sourceLineNo">210</span>      Thread.sleep(500);<a name="line.210"></a>
+<span class="sourceLineNo">211</span>      // do the scan again and verify. This time it should be from the bucket cache in offheap mode<a name="line.211"></a>
+<span class="sourceLineNo">212</span>      // but one of the cell will be copied due to the asSubByteBuff call<a name="line.212"></a>
+<span class="sourceLineNo">213</span>      Scan scan = new Scan(row1);<a name="line.213"></a>
+<span class="sourceLineNo">214</span>      scan.addFamily(fam1);<a name="line.214"></a>
+<span class="sourceLineNo">215</span>      scan.setMaxVersions(10);<a name="line.215"></a>
+<span class="sourceLineNo">216</span>      actual = new ArrayList&lt;&gt;();<a name="line.216"></a>
+<span class="sourceLineNo">217</span>      InternalScanner scanner = region.getScanner(scan);<a name="line.217"></a>
+<span class="sourceLineNo">218</span><a name="line.218"></a>
+<span class="sourceLineNo">219</span>      boolean hasNext = scanner.next(actual);<a name="line.219"></a>
+<span class="sourceLineNo">220</span>      assertEquals(false, hasNext);<a name="line.220"></a>
+<span class="sourceLineNo">221</span>      // Verify result<a name="line.221"></a>
+<span class="sourceLineNo">222</span>      for (int i = 0; i &lt; expected.size(); i++) {<a name="line.222"></a>
+<span class="sourceLineNo">223</span>        if (i != 5) {<a name="line.223"></a>
+<span class="sourceLineNo">224</span>          // the last cell fetched will be of type shareable but not offheap because<a name="line.224"></a>
+<span class="sourceLineNo">225</span>          // the MBB is copied to form a single cell<a name="line.225"></a>
+<span class="sourceLineNo">226</span>          assertTrue(actual.get(i) instanceof ByteBufferKeyValue);<a name="line.226"></a>
+<span class="sourceLineNo">227</span>        }<a name="line.227"></a>
+<span class="sourceLineNo">228</span>      }<a name="line.228"></a>
+<span class="sourceLineNo">229</span><a name="line.229"></a>
+<span class="sourceLineNo">230</span>    } catch (InterruptedException e) {<a name="line.230"></a>
+<span class="sourceLineNo">231</span>    } finally {<a name="line.231"></a>
+<span class="sourceLineNo">232</span>      HBaseTestingUtility.closeRegionAndWAL(this.region);<a name="line.232"></a>
+<span class="sourceLineNo">233</span>      this.region = null;<a name="line.233"></a>
+<span class="sourceLineNo">234</span>    }<a name="line.234"></a>
+<span class="sourceLineNo">235</span>  }<a name="line.235"></a>
+<span class="sourceLineNo">236</span><a name="line.236"></a>
+<span class="sourceLineNo">237</span>  private List&lt;Cell&gt; insertData(byte[] row1, byte[] qf1, byte[] qf2, byte[] fam1, long ts1,<a name="line.237"></a>
+<span class="sourceLineNo">238</span>      long ts2, long ts3, boolean withVal) throws IOException {<a name="line.238"></a>
+<span class="sourceLineNo">239</span>    // Putting data in Region<a name="line.239"></a>
+<span class="sourceLineNo">240</span>    Put put = null;<a name="line.240"></a>
+<span class="sourceLineNo">241</span>    KeyValue kv13 = null;<a name="line.241"></a>
+<span class="sourceLineNo">242</span>    KeyValue kv12 = null;<a name="line.242"></a>
+<span class="sourceLineNo">243</span>    KeyValue kv11 = null;<a name="line.243"></a>
+<span class="sourceLineNo">244</span><a name="line.244"></a>
+<span class="sourceLineNo">245</span>    KeyValue kv23 = null;<a name="line.245"></a>
+<span class="sourceLineNo">246</span>    KeyValue kv22 = null;<a name="line.246"></a>
+<span class="sourceLineNo">247</span>    KeyValue kv21 = null;<a name="line.247"></a>
+<span class="sourceLineNo">248</span>    if (!withVal) {<a name="line.248"></a>
+<span class="sourceLineNo">249</span>      kv13 = new KeyValue(row1, fam1, qf1, ts3, KeyValue.Type.Put, null);<a name="line.249"></a>
+<span class="sourceLineNo">250</span>      kv12 = new KeyValue(row1, fam1, qf1, ts2, KeyValue.Type.Put, null);<a name="line.250"></a>
+<span class="sourceLineNo">251</span>      kv11 = new KeyValue(row1, fam1, qf1, ts1, KeyValue.Type.Put, null);<a name="line.251"></a>
+<span class="sourceLineNo">252</span><a name="line.252"></a>
+<span class="sourceLineNo">253</span>      kv23 = new KeyValue(row1, fam1, qf2, ts3, KeyValue.Type.Put, null);<a name="line.253"></a>
+<span class="sourceLineNo">254</span>      kv22 = new KeyValue(row1, fam1, qf2, ts2, KeyValue.Type.Put, null);<a name="line.254"></a>
+<span class="sourceLineNo">255</span>      kv21 = new KeyValue(row1, fam1, qf2, ts1, KeyValue.Type.Put, null);<a name="line.255"></a>
+<span class="sourceLineNo">256</span>    } else {<a name="line.256"></a>
+<span class="sourceLineNo">257</span>      kv13 = new KeyValue(row1, fam1, qf1, ts3, KeyValue.Type.Put, val);<a name="line.257"></a>
+<span class="sourceLineNo">258</span>      kv12 = new KeyValue(row1, fam1, qf1, ts2, KeyValue.Type.Put, val);<a name="line.258"></a>
+<span class="sourceLineNo">259</span>      kv11 = new KeyValue(row1, fam1, qf1, ts1, KeyValue.Type.Put, val);<a name="line.259"></a>
+<span class="sourceLineNo">260</span><a name="line.260"></a>
+<span class="sourceLineNo">261</span>      kv23 = new KeyValue(row1, fam1, qf2, ts3, KeyValue.Type.Put, val);<a name="line.261"></a>
+<span class="sourceLineNo">262</span>      kv22 = new KeyValue(row1, fam1, qf2, ts2, KeyValue.Type.Put, val);<a name="line.262"></a>
+<span class="sourceLineNo">263</span>      kv21 = new KeyValue(row1, fam1, qf2, ts1, KeyValue.Type.Put, val);<a name="line.263"></a>
+<span class="sourceLineNo">264</span>    }<a name="line.264"></a>
+<span class="sourceLineNo">265</span><a name="line.265"></a>
+<span class="sourceLineNo">266</span>    put = new Put(row1);<a name="line.266"></a>
+<span class="sourceLineNo">267</span>    put.add(kv13);<a name="line.267"></a>
+<span class="sourceLineNo">268</span>    put.add(kv12);<a name="line.268"></a>
+<span class="sourceLineNo">269</span>    put.add(kv11);<a name="line.269"></a>
+<span class="sourceLineNo">270</span>    put.add(kv23);<a name="line.270"></a>
+<span class="sourceLineNo">271</span>    put.add(kv22);<a name="line.271"></a>
+<span class="sourceLineNo">272</span>    put.add(kv21);<a name="line.272"></a>
+<span class="sourceLineNo">273</span>    region.put(put);<a name="line.273"></a>
+<span class="sourceLineNo">274</span>    region.flush(true);<a name="line.274"></a>
+<span class="sourceLineNo">275</span>    HStore store = region.getStore(fam1);<a name="line.275"></a>
+<span class="sourceLineNo">276</span>    while (store.getStorefilesCount() &lt;= 0) {<a name="line.276"></a>
+<span class="sourceLineNo">277</span>      try {<a name="line.277"></a>
+<span class="sourceLineNo">278</span>        Thread.sleep(20);<a name="line.278"></a>
+<span class="sourceLineNo">279</span>      } catch (InterruptedException e) {<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      }<a name="line.280"></a>
+<span class="sourceLineNo">281</span>    }<a name="line.281"></a>
+<span class="sourceLineNo">282</span><a name="line.282"></a>
+<span class="sourceLineNo">283</span>    // Expected<a name="line.283"></a>
+<span class="sourceLineNo">284</span>    List&lt;Cell&gt; expected = new ArrayList&lt;&gt;();<a name="line.284"></a>
+<span class="sourceLineNo">285</span>    expected.add(kv13);<a name="line.285"></a>
+<span class="sourceLineNo">286</span>    expected.add(kv12);<a name="line.286"></a>
+<span class="sourceLineNo">287</span>    expected.add(kv23);<a name="line.287"></a>
+<span class="sourceLineNo">288</span>    expected.add(kv22);<a name="line.288"></a>
+<span class="sourceLineNo">289</span>    return expected;<a name="line.289"></a>
+<span class="sourceLineNo">290</span>  }<a name="line.290"></a>
+<span class="sourceLineNo">291</span><a name="line.291"></a>
+<span class="sourceLineNo">292</span>  private List&lt;Cell&gt; performScan(byte[] row1, byte[] fam1) throws IOException {<a name="line.292"></a>
+<span class="sourceLineNo">293</span>    Scan scan = new Scan(row1);<a name="line.293"></a>
+<span class="sourceLineNo">294</span>    scan.addFamily(fam1);<a name="line.294"></a>
+<span class="sourceLineNo">295</span>    scan.setMaxVersions(MAX_VERSIONS);<a name="line.295"></a>
+<span class="sourceLineNo">296</span>    List&lt;Cell&gt; actual = new ArrayList&lt;&gt;();<a name="line.296"></a>
+<span class="sourceLineNo">297</span>    InternalScanner scanner = region.getScanner(scan);<a name="line.297"></a>
+<span class="sourceLineNo">298</span><a name="line.298"></a>
+<span class="sourceLineNo">299</span>    boolean hasNext = scanner.next(actual);<a name="line.299"></a>
+<span class="sourceLineNo">300</span>    assertEquals(false, hasNext);<a name="line.300"></a>
+<span class="sourceLineNo">301</span>    return actual;<a name="line.301"></a>
+<span class="sourceLineNo">302</span>  }<a name="line.302"></a>
+<span class="sourceLineNo">303</span><a name="line.303"></a>
+<span class="sourceLineNo">304</span>  private static HRegion initHRegion(TableName tableName, String callingMethod, Configuration conf,<a name="line.304"></a>
+<span class="sourceLineNo">305</span>      HBaseTestingUtility test_util, byte[]... families) throws IOException {<a name="line.305"></a>
+<span class="sourceLineNo">306</span>    return initHRegion(tableName, null, null, callingMethod, conf, test_util, false, families);<a name="line.306"></a>
+<span class="sourceLineNo">307</span>  }<a name="line.307"></a>
+<span class="sourceLineNo">308</span><a name="line.308"></a>
+<span class="sourceLineNo">309</span>  private static HRegion initHRegion(TableName tableName, byte[] startKey, byte[] stopKey,<a name="line.309"></a>
+<span class="sourceLineNo">310</span>      String callingMethod, Configuration conf, HBaseTestingUtility test_util, boolean isReadOnly,<a name="line.310"></a>
+<span class="sourceLineNo">311</span>      byte[]... families) throws IOException {<a name="line.311"></a>
+<span class="sourceLineNo">312</span>    ChunkCreator.initialize(MemStoreLABImpl.CHUNK_SIZE_DEFAULT, false, 0, 0, 0, null);<a name="line.312"></a>
+<span class="sourceLineNo">313</span>    Path logDir = test_util.getDataTestDirOnTestFS(callingMethod + ".log");<a name="line.313"></a>
+<span class="sourceLineNo">314</span>    HRegionInfo hri = new HRegionInfo(tableName, startKey, stopKey);<a name="line.314"></a>
+<span class="sourceLineNo">315</span>    final WAL wal = HBaseTestingUtility.createWal(conf, logDir, hri);<a name="line.315"></a>
+<span class="sourceLineNo">316</span>    return initHRegion(tableName, startKey, stopKey, callingMethod, conf, test_util, isReadOnly,<a name="line.316"></a>
+<span class="sourceLineNo">317</span>      Durability.SYNC_WAL, wal, families);<a name="line.317"></a>
+<span class="sourceLineNo">318</span>  }<a name="line.318"></a>
+<span class="sourceLineNo">319</span><a name="line.319"></a>
+<span class="sourceLineNo">320</span>  /**<a name="line.320"></a>
+<span class="sourceLineNo">321</span>   * @param tableName<a name="line.321"></a>
+<span class="sourceLineNo">322</span>   * @param startKey<a name="line.322"></a>
+<span class="sourceLineNo">323</span>   * @param stopKey<a name="line.323"></a>
+<span class="sourceLineNo">324</span>   * @param callingMethod<a name="line.324"></a>
+<span class="sourceLineNo">325</span>   * @param conf<a name="line.325"></a>
+<span class="sourceLineNo">326</span>   * @param isReadOnly<a name="line.326"></a>
+<span class="sourceLineNo">327</span>   * @param families<a name="line.327"></a>
+<span class="sourceLineNo">328</span>   * @throws IOException<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * @return A region on which you must call {@link HBaseTestingUtility#closeRegionAndWAL(HRegion)}<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   *         when done.<a name="line.330"></a>
+<span class="sourceLineNo">331</span>   */<a name="line.331"></a>
+<span class="sourceLineNo">332</span>  private static HRegion initHRegion(TableName tableName, byte[] startKey, byte[] stopKey,<a name="line.332"></a>
+<span class="sourceLineNo">333</span>      String callingMethod, Configuration conf, HBaseTestingUtility test_util, boolean isReadOnly,<a name="line.333"></a>
+<span class="sourceLineNo">334</span>      Durability durability, WAL wal, byte[]... families) throws IOException {<a name="line.334"></a>
+<span class="sourceLineNo">335</span>    return test_util.createLocalHRegion(tableName, startKey, stopKey, isReadOnly, durability, wal,<a name="line.335"></a>
+<span class="sourceLineNo">336</span>      families);<a name="line.336"></a>
+<span class="sourceLineNo">337</span>  }<a name="line.337"></a>
+<span class="sourceLineNo">338</span>}<a name="line.338"></a>
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.html
index a056034..8a18927 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.html
@@ -132,20 +132,21 @@
 <span class="sourceLineNo">124</span><a name="line.124"></a>
 <span class="sourceLineNo">125</span>    Scan scan = new Scan(Bytes.toBytes("aaa"), Bytes.toBytes("aaz"));<a name="line.125"></a>
 <span class="sourceLineNo">126</span>    CacheConfig.blockCacheDisabled = false;<a name="line.126"></a>
-<span class="sourceLineNo">127</span>    CacheConfig cacheConf = new CacheConfig(conf);<a name="line.127"></a>
-<span class="sourceLineNo">128</span>    LruBlockCache cache = (LruBlockCache) cacheConf.getBlockCache();<a name="line.128"></a>
-<span class="sourceLineNo">129</span>    cache.clearCache();<a name="line.129"></a>
-<span class="sourceLineNo">130</span>    InternalScanner scanner = region.getScanner(scan);<a name="line.130"></a>
-<span class="sourceLineNo">131</span>    List&lt;Cell&gt; results = new ArrayList&lt;&gt;();<a name="line.131"></a>
-<span class="sourceLineNo">132</span>    while (scanner.next(results)) {<a name="line.132"></a>
-<span class="sourceLineNo">133</span>    }<a name="line.133"></a>
-<span class="sourceLineNo">134</span>    scanner.close();<a name="line.134"></a>
-<span class="sourceLineNo">135</span>    assertEquals(0, results.size());<a name="line.135"></a>
-<span class="sourceLineNo">136</span>    Set&lt;String&gt; accessedFiles = cache.getCachedFileNamesForTest();<a name="line.136"></a>
-<span class="sourceLineNo">137</span>    assertEquals(expectedCount, accessedFiles.size());<a name="line.137"></a>
-<span class="sourceLineNo">138</span>    HBaseTestingUtility.closeRegionAndWAL(region);<a name="line.138"></a>
-<span class="sourceLineNo">139</span>  }<a name="line.139"></a>
-<span class="sourceLineNo">140</span>}<a name="line.140"></a>
+<span class="sourceLineNo">127</span>    CacheConfig.instantiateBlockCache(conf);<a name="line.127"></a>
+<span class="sourceLineNo">128</span>    CacheConfig cacheConf = new CacheConfig(conf);<a name="line.128"></a>
+<span class="sourceLineNo">129</span>    LruBlockCache cache = (LruBlockCache) cacheConf.getBlockCache();<a name="line.129"></a>
+<span class="sourceLineNo">130</span>    cache.clearCache();<a name="line.130"></a>
+<span class="sourceLineNo">131</span>    InternalScanner scanner = region.getScanner(scan);<a name="line.131"></a>
+<span class="sourceLineNo">132</span>    List&lt;Cell&gt; results = new ArrayList&lt;&gt;();<a name="line.132"></a>
+<span class="sourceLineNo">133</span>    while (scanner.next(results)) {<a name="line.133"></a>
+<span class="sourceLineNo">134</span>    }<a name="line.134"></a>
+<span class="sourceLineNo">135</span>    scanner.close();<a name="line.135"></a>
+<span class="sourceLineNo">136</span>    assertEquals(0, results.size());<a name="line.136"></a>
+<span class="sourceLineNo">137</span>    Set&lt;String&gt; accessedFiles = cache.getCachedFileNamesForTest();<a name="line.137"></a>
+<span class="sourceLineNo">138</span>    assertEquals(expectedCount, accessedFiles.size());<a name="line.138"></a>
+<span class="sourceLineNo">139</span>    HBaseTestingUtility.closeRegionAndWAL(region);<a name="line.139"></a>
+<span class="sourceLineNo">140</span>  }<a name="line.140"></a>
+<span class="sourceLineNo">141</span>}<a name="line.141"></a>
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.html
index 6194ade..70ad44b 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.html
@@ -112,67 +112,68 @@
 <span class="sourceLineNo">104</span>  @Test<a name="line.104"></a>
 <span class="sourceLineNo">105</span>  public void testScannerSelection() throws IOException {<a name="line.105"></a>
 <span class="sourceLineNo">106</span>    Configuration conf = TEST_UTIL.getConfiguration();<a name="line.106"></a>
-<span class="sourceLineNo">107</span>    conf.setBoolean("hbase.store.delete.expired.storefile", false);<a name="line.107"></a>
-<span class="sourceLineNo">108</span>    HColumnDescriptor hcd =<a name="line.108"></a>
-<span class="sourceLineNo">109</span>      new HColumnDescriptor(FAMILY_BYTES)<a name="line.109"></a>
-<span class="sourceLineNo">110</span>          .setMaxVersions(Integer.MAX_VALUE)<a name="line.110"></a>
-<span class="sourceLineNo">111</span>          .setTimeToLive(TTL_SECONDS);<a name="line.111"></a>
-<span class="sourceLineNo">112</span>    HTableDescriptor htd = new HTableDescriptor(TABLE);<a name="line.112"></a>
-<span class="sourceLineNo">113</span>    htd.addFamily(hcd);<a name="line.113"></a>
-<span class="sourceLineNo">114</span>    HRegionInfo info = new HRegionInfo(TABLE);<a name="line.114"></a>
-<span class="sourceLineNo">115</span>    HRegion region = HBaseTestingUtility.createRegionAndWAL(info,<a name="line.115"></a>
-<span class="sourceLineNo">116</span>      TEST_UTIL.getDataTestDir(info.getEncodedName()), conf, htd);<a name="line.116"></a>
-<span class="sourceLineNo">117</span><a name="line.117"></a>
-<span class="sourceLineNo">118</span>    long ts = EnvironmentEdgeManager.currentTime();<a name="line.118"></a>
-<span class="sourceLineNo">119</span>    long version = 0; //make sure each new set of Put's have a new ts<a name="line.119"></a>
-<span class="sourceLineNo">120</span>    for (int iFile = 0; iFile &lt; totalNumFiles; ++iFile) {<a name="line.120"></a>
-<span class="sourceLineNo">121</span>      if (iFile == NUM_EXPIRED_FILES) {<a name="line.121"></a>
-<span class="sourceLineNo">122</span>        Threads.sleepWithoutInterrupt(TTL_MS);<a name="line.122"></a>
-<span class="sourceLineNo">123</span>        version += TTL_MS;<a name="line.123"></a>
-<span class="sourceLineNo">124</span>      }<a name="line.124"></a>
-<span class="sourceLineNo">125</span><a name="line.125"></a>
-<span class="sourceLineNo">126</span>      for (int iRow = 0; iRow &lt; NUM_ROWS; ++iRow) {<a name="line.126"></a>
-<span class="sourceLineNo">127</span>        Put put = new Put(Bytes.toBytes("row" + iRow));<a name="line.127"></a>
-<span class="sourceLineNo">128</span>        for (int iCol = 0; iCol &lt; NUM_COLS_PER_ROW; ++iCol) {<a name="line.128"></a>
-<span class="sourceLineNo">129</span>          put.addColumn(FAMILY_BYTES, Bytes.toBytes("col" + iCol), ts + version,<a name="line.129"></a>
-<span class="sourceLineNo">130</span>                  Bytes.toBytes("value" + iFile + "_" + iRow + "_" + iCol));<a name="line.130"></a>
-<span class="sourceLineNo">131</span>        }<a name="line.131"></a>
-<span class="sourceLineNo">132</span>        region.put(put);<a name="line.132"></a>
-<span class="sourceLineNo">133</span>      }<a name="line.133"></a>
-<span class="sourceLineNo">134</span>      region.flush(true);<a name="line.134"></a>
-<span class="sourceLineNo">135</span>      version++;<a name="line.135"></a>
-<span class="sourceLineNo">136</span>    }<a name="line.136"></a>
-<span class="sourceLineNo">137</span><a name="line.137"></a>
-<span class="sourceLineNo">138</span>    Scan scan = new Scan();<a name="line.138"></a>
-<span class="sourceLineNo">139</span>    scan.setMaxVersions(Integer.MAX_VALUE);<a name="line.139"></a>
-<span class="sourceLineNo">140</span>    CacheConfig cacheConf = new CacheConfig(conf);<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    LruBlockCache cache = (LruBlockCache) cacheConf.getBlockCache();<a name="line.141"></a>
-<span class="sourceLineNo">142</span>    cache.clearCache();<a name="line.142"></a>
-<span class="sourceLineNo">143</span>    InternalScanner scanner = region.getScanner(scan);<a name="line.143"></a>
-<span class="sourceLineNo">144</span>    List&lt;Cell&gt; results = new ArrayList&lt;&gt;();<a name="line.144"></a>
-<span class="sourceLineNo">145</span>    final int expectedKVsPerRow = numFreshFiles * NUM_COLS_PER_ROW;<a name="line.145"></a>
-<span class="sourceLineNo">146</span>    int numReturnedRows = 0;<a name="line.146"></a>
-<span class="sourceLineNo">147</span>    LOG.info("Scanning the entire table");<a name="line.147"></a>
-<span class="sourceLineNo">148</span>    while (scanner.next(results) || results.size() &gt; 0) {<a name="line.148"></a>
-<span class="sourceLineNo">149</span>      assertEquals(expectedKVsPerRow, results.size());<a name="line.149"></a>
-<span class="sourceLineNo">150</span>      ++numReturnedRows;<a name="line.150"></a>
-<span class="sourceLineNo">151</span>      results.clear();<a name="line.151"></a>
-<span class="sourceLineNo">152</span>    }<a name="line.152"></a>
-<span class="sourceLineNo">153</span>    assertEquals(NUM_ROWS, numReturnedRows);<a name="line.153"></a>
-<span class="sourceLineNo">154</span>    Set&lt;String&gt; accessedFiles = cache.getCachedFileNamesForTest();<a name="line.154"></a>
-<span class="sourceLineNo">155</span>    LOG.debug("Files accessed during scan: " + accessedFiles);<a name="line.155"></a>
-<span class="sourceLineNo">156</span><a name="line.156"></a>
-<span class="sourceLineNo">157</span>    // Exercise both compaction codepaths.<a name="line.157"></a>
-<span class="sourceLineNo">158</span>    if (explicitCompaction) {<a name="line.158"></a>
-<span class="sourceLineNo">159</span>      HStore store = region.getStore(FAMILY_BYTES);<a name="line.159"></a>
-<span class="sourceLineNo">160</span>      store.compactRecentForTestingAssumingDefaultPolicy(totalNumFiles);<a name="line.160"></a>
-<span class="sourceLineNo">161</span>    } else {<a name="line.161"></a>
-<span class="sourceLineNo">162</span>      region.compact(false);<a name="line.162"></a>
-<span class="sourceLineNo">163</span>    }<a name="line.163"></a>
-<span class="sourceLineNo">164</span><a name="line.164"></a>
-<span class="sourceLineNo">165</span>    HBaseTestingUtility.closeRegionAndWAL(region);<a name="line.165"></a>
-<span class="sourceLineNo">166</span>  }<a name="line.166"></a>
-<span class="sourceLineNo">167</span>}<a name="line.167"></a>
+<span class="sourceLineNo">107</span>    CacheConfig.instantiateBlockCache(conf);<a name="line.107"></a>
+<span class="sourceLineNo">108</span>    conf.setBoolean("hbase.store.delete.expired.storefile", false);<a name="line.108"></a>
+<span class="sourceLineNo">109</span>    HColumnDescriptor hcd =<a name="line.109"></a>
+<span class="sourceLineNo">110</span>      new HColumnDescriptor(FAMILY_BYTES)<a name="line.110"></a>
+<span class="sourceLineNo">111</span>          .setMaxVersions(Integer.MAX_VALUE)<a name="line.111"></a>
+<span class="sourceLineNo">112</span>          .setTimeToLive(TTL_SECONDS);<a name="line.112"></a>
+<span class="sourceLineNo">113</span>    HTableDescriptor htd = new HTableDescriptor(TABLE);<a name="line.113"></a>
+<span class="sourceLineNo">114</span>    htd.addFamily(hcd);<a name="line.114"></a>
+<span class="sourceLineNo">115</span>    HRegionInfo info = new HRegionInfo(TABLE);<a name="line.115"></a>
+<span class="sourceLineNo">116</span>    HRegion region = HBaseTestingUtility.createRegionAndWAL(info,<a name="line.116"></a>
+<span class="sourceLineNo">117</span>      TEST_UTIL.getDataTestDir(info.getEncodedName()), conf, htd);<a name="line.117"></a>
+<span class="sourceLineNo">118</span><a name="line.118"></a>
+<span class="sourceLineNo">119</span>    long ts = EnvironmentEdgeManager.currentTime();<a name="line.119"></a>
+<span class="sourceLineNo">120</span>    long version = 0; //make sure each new set of Put's have a new ts<a name="line.120"></a>
+<span class="sourceLineNo">121</span>    for (int iFile = 0; iFile &lt; totalNumFiles; ++iFile) {<a name="line.121"></a>
+<span class="sourceLineNo">122</span>      if (iFile == NUM_EXPIRED_FILES) {<a name="line.122"></a>
+<span class="sourceLineNo">123</span>        Threads.sleepWithoutInterrupt(TTL_MS);<a name="line.123"></a>
+<span class="sourceLineNo">124</span>        version += TTL_MS;<a name="line.124"></a>
+<span class="sourceLineNo">125</span>      }<a name="line.125"></a>
+<span class="sourceLineNo">126</span><a name="line.126"></a>
+<span class="sourceLineNo">127</span>      for (int iRow = 0; iRow &lt; NUM_ROWS; ++iRow) {<a name="line.127"></a>
+<span class="sourceLineNo">128</span>        Put put = new Put(Bytes.toBytes("row" + iRow));<a name="line.128"></a>
+<span class="sourceLineNo">129</span>        for (int iCol = 0; iCol &lt; NUM_COLS_PER_ROW; ++iCol) {<a name="line.129"></a>
+<span class="sourceLineNo">130</span>          put.addColumn(FAMILY_BYTES, Bytes.toBytes("col" + iCol), ts + version,<a name="line.130"></a>
+<span class="sourceLineNo">131</span>                  Bytes.toBytes("value" + iFile + "_" + iRow + "_" + iCol));<a name="line.131"></a>
+<span class="sourceLineNo">132</span>        }<a name="line.132"></a>
+<span class="sourceLineNo">133</span>        region.put(put);<a name="line.133"></a>
+<span class="sourceLineNo">134</span>      }<a name="line.134"></a>
+<span class="sourceLineNo">135</span>      region.flush(true);<a name="line.135"></a>
+<span class="sourceLineNo">136</span>      version++;<a name="line.136"></a>
+<span class="sourceLineNo">137</span>    }<a name="line.137"></a>
+<span class="sourceLineNo">138</span><a name="line.138"></a>
+<span class="sourceLineNo">139</span>    Scan scan = new Scan();<a name="line.139"></a>
+<span class="sourceLineNo">140</span>    scan.setMaxVersions(Integer.MAX_VALUE);<a name="line.140"></a>
+<span class="sourceLineNo">141</span>    CacheConfig cacheConf = new CacheConfig(conf);<a name="line.141"></a>
+<span class="sourceLineNo">142</span>    LruBlockCache cache = (LruBlockCache) cacheConf.getBlockCache();<a name="line.142"></a>
+<span class="sourceLineNo">143</span>    cache.clearCache();<a name="line.143"></a>
+<span class="sourceLineNo">144</span>    InternalScanner scanner = region.getScanner(scan);<a name="line.144"></a>
+<span class="sourceLineNo">145</span>    List&lt;Cell&gt; results = new ArrayList&lt;&gt;();<a name="line.145"></a>
+<span class="sourceLineNo">146</span>    final int expectedKVsPerRow = numFreshFiles * NUM_COLS_PER_ROW;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>    int numReturnedRows = 0;<a name="line.147"></a>
+<span class="sourceLineNo">148</span>    LOG.info("Scanning the entire table");<a name="line.148"></a>
+<span class="sourceLineNo">149</span>    while (scanner.next(results) || results.size() &gt; 0) {<a name="line.149"></a>
+<span class="sourceLineNo">150</span>      assertEquals(expectedKVsPerRow, results.size());<a name="line.150"></a>
+<span class="sourceLineNo">151</span>      ++numReturnedRows;<a name="line.151"></a>
+<span class="sourceLineNo">152</span>      results.clear();<a name="line.152"></a>
+<span class="sourceLineNo">153</span>    }<a name="line.153"></a>
+<span class="sourceLineNo">154</span>    assertEquals(NUM_ROWS, numReturnedRows);<a name="line.154"></a>
+<span class="sourceLineNo">155</span>    Set&lt;String&gt; accessedFiles = cache.getCachedFileNamesForTest();<a name="line.155"></a>
+<span class="sourceLineNo">156</span>    LOG.debug("Files accessed during scan: " + accessedFiles);<a name="line.156"></a>
+<span class="sourceLineNo">157</span><a name="line.157"></a>
+<span class="sourceLineNo">158</span>    // Exercise both compaction codepaths.<a name="line.158"></a>
+<span class="sourceLineNo">159</span>    if (explicitCompaction) {<a name="line.159"></a>
+<span class="sourceLineNo">160</span>      HStore store = region.getStore(FAMILY_BYTES);<a name="line.160"></a>
+<span class="sourceLineNo">161</span>      store.compactRecentForTestingAssumingDefaultPolicy(totalNumFiles);<a name="line.161"></a>
+<span class="sourceLineNo">162</span>    } else {<a name="line.162"></a>
+<span class="sourceLineNo">163</span>      region.compact(false);<a name="line.163"></a>
+<span class="sourceLineNo">164</span>    }<a name="line.164"></a>
+<span class="sourceLineNo">165</span><a name="line.165"></a>
+<span class="sourceLineNo">166</span>    HBaseTestingUtility.closeRegionAndWAL(region);<a name="line.166"></a>
+<span class="sourceLineNo">167</span>  }<a name="line.167"></a>
+<span class="sourceLineNo">168</span>}<a name="line.168"></a>
 
 
 


[10/26] hbase-site git commit: Published site at 6f15cecaed2f1f76bfe1880b7c578ed369daa5d5.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.html b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.html
index 3fc11a2..446e842 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.html
@@ -514,7 +514,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>tearDown</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.html#line.188">tearDown</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.html#line.189">tearDown</a>()
               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -528,7 +528,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testCacheOnWriteInSchema</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.html#line.214">testCacheOnWriteInSchema</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.html#line.215">testCacheOnWriteInSchema</a>()
                               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -542,7 +542,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>readStoreFile</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.html#line.224">readStoreFile</a>(org.apache.hadoop.fs.Path&nbsp;path)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.html#line.225">readStoreFile</a>(org.apache.hadoop.fs.Path&nbsp;path)
                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -556,7 +556,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>generateKeyType</h4>
-<pre>private static&nbsp;org.apache.hadoop.hbase.KeyValue.Type&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.html#line.260">generateKeyType</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Random.html?is-external=true" title="class or interface in java.util">Random</a>&nbsp;rand)</pre>
+<pre>private static&nbsp;org.apache.hadoop.hbase.KeyValue.Type&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.html#line.261">generateKeyType</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Random.html?is-external=true" title="class or interface in java.util">Random</a>&nbsp;rand)</pre>
 </li>
 </ul>
 <a name="writeStoreFile-org.apache.hadoop.hbase.regionserver.StoreFileWriter-">
@@ -565,7 +565,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>writeStoreFile</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.html#line.276">writeStoreFile</a>(org.apache.hadoop.hbase.regionserver.StoreFileWriter&nbsp;writer)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.html#line.277">writeStoreFile</a>(org.apache.hadoop.hbase.regionserver.StoreFileWriter&nbsp;writer)
                      throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.html b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.html
index e7965fc..3878ce8 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.html
@@ -534,7 +534,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>createSortedKeyValues</h4>
-<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.hbase.KeyValue&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.html#line.147">createSortedKeyValues</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Random.html?is-external=true" title="class or interface in java.util">Random</a>&nbsp;rand,
+<pre>private&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.hbase.KeyValue&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.html#line.148">createSortedKeyValues</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Random.html?is-external=true" title="class or interface in java.util">Random</a>&nbsp;rand,
                                                                      int&nbsp;n)</pre>
 </li>
 </ul>
@@ -544,7 +544,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testCompoundBloomFilter</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.html#line.156">testCompoundBloomFilter</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.html#line.157">testCompoundBloomFilter</a>()
                              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -558,7 +558,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>validateFalsePosRate</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.html#line.183">validateFalsePosRate</a>(double&nbsp;falsePosRate,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.html#line.184">validateFalsePosRate</a>(double&nbsp;falsePosRate,
                                   int&nbsp;nTrials,
                                   double&nbsp;zValueBoundary,
                                   org.apache.hadoop.hbase.io.hfile.CompoundBloomFilter&nbsp;cbf,
@@ -583,7 +583,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>readStoreFile</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.html#line.204">readStoreFile</a>(int&nbsp;t,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.html#line.205">readStoreFile</a>(int&nbsp;t,
                            org.apache.hadoop.hbase.regionserver.BloomType&nbsp;bt,
                            <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.hbase.KeyValue&gt;&nbsp;kvs,
                            org.apache.hadoop.fs.Path&nbsp;sfPath)
@@ -600,7 +600,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>isInBloom</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.html#line.287">isInBloom</a>(org.apache.hadoop.hbase.regionserver.StoreFileScanner&nbsp;scanner,
+<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.html#line.288">isInBloom</a>(org.apache.hadoop.hbase.regionserver.StoreFileScanner&nbsp;scanner,
                           byte[]&nbsp;row,
                           org.apache.hadoop.hbase.regionserver.BloomType&nbsp;bt,
                           <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Random.html?is-external=true" title="class or interface in java.util">Random</a>&nbsp;rand)</pre>
@@ -612,7 +612,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>isInBloom</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.html#line.292">isInBloom</a>(org.apache.hadoop.hbase.regionserver.StoreFileScanner&nbsp;scanner,
+<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.html#line.293">isInBloom</a>(org.apache.hadoop.hbase.regionserver.StoreFileScanner&nbsp;scanner,
                           byte[]&nbsp;row,
                           byte[]&nbsp;qualifier)</pre>
 </li>
@@ -623,7 +623,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>writeStoreFile</h4>
-<pre>private&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.html#line.302">writeStoreFile</a>(int&nbsp;t,
+<pre>private&nbsp;org.apache.hadoop.fs.Path&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.html#line.303">writeStoreFile</a>(int&nbsp;t,
                                                  org.apache.hadoop.hbase.regionserver.BloomType&nbsp;bt,
                                                  <a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.hbase.KeyValue&gt;&nbsp;kvs)
                                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -639,7 +639,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testCompoundBloomSizing</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.html#line.344">testCompoundBloomSizing</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.html#line.345">testCompoundBloomSizing</a>()</pre>
 </li>
 </ul>
 <a name="testCreateKey--">
@@ -648,7 +648,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>testCreateKey</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.html#line.360">testCreateKey</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.html#line.361">testCreateKey</a>()</pre>
 </li>
 </ul>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHStoreFile.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHStoreFile.html b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHStoreFile.html
index a91f221..17cc8c2 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHStoreFile.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestHStoreFile.html
@@ -886,7 +886,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/HBaseTestCase.html" titl
 <ul class="blockList">
 <li class="blockList">
 <h4>testCacheOnWriteEvictOnClose</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestHStoreFile.html#line.931">testCacheOnWriteEvictOnClose</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestHStoreFile.html#line.930">testCacheOnWriteEvictOnClose</a>()
                                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true" title="class or interface in java.lang">Exception</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.html b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.html
index fc10eac..c23d33e 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.html
@@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";
 <li class="blockList">
 <hr>
 <br>
-<pre>public class <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.html#line.63">TestRecoveredEdits</a>
+<pre>public class <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.html#line.64">TestRecoveredEdits</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 <div class="block">Tests around replay of recovered.edits content.</div>
 </li>
@@ -223,7 +223,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>CLASS_RULE</h4>
-<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/HBaseClassTestRule.html" title="class in org.apache.hadoop.hbase">HBaseClassTestRule</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.html#line.66">CLASS_RULE</a></pre>
+<pre>public static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/HBaseClassTestRule.html" title="class in org.apache.hadoop.hbase">HBaseClassTestRule</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.html#line.67">CLASS_RULE</a></pre>
 </li>
 </ul>
 <a name="TEST_UTIL">
@@ -232,7 +232,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>TEST_UTIL</h4>
-<pre>private static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/HBaseTestingUtility.html" title="class in org.apache.hadoop.hbase">HBaseTestingUtility</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.html#line.69">TEST_UTIL</a></pre>
+<pre>private static final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/HBaseTestingUtility.html" title="class in org.apache.hadoop.hbase">HBaseTestingUtility</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.html#line.70">TEST_UTIL</a></pre>
 </li>
 </ul>
 <a name="LOG">
@@ -241,7 +241,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>LOG</h4>
-<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.html#line.70">LOG</a></pre>
+<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.html#line.71">LOG</a></pre>
 </li>
 </ul>
 <a name="testName">
@@ -250,7 +250,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>testName</h4>
-<pre>public&nbsp;org.junit.rules.TestName <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.html#line.71">testName</a></pre>
+<pre>public&nbsp;org.junit.rules.TestName <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.html#line.72">testName</a></pre>
 </li>
 </ul>
 </li>
@@ -267,7 +267,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>TestRecoveredEdits</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.html#line.63">TestRecoveredEdits</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.html#line.64">TestRecoveredEdits</a>()</pre>
 </li>
 </ul>
 </li>
@@ -284,7 +284,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testReplayWorksThoughLotsOfFlushing</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.html#line.81">testReplayWorksThoughLotsOfFlushing</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.html#line.82">testReplayWorksThoughLotsOfFlushing</a>()
                                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">HBASE-12782 ITBLL fails for me if generator does anything but 5M per maptask.
  Create a region. Close it. Then copy into place a file to replay, one that is bigger than
@@ -302,7 +302,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>testReplayWorksWithMemoryCompactionPolicy</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.html#line.88">testReplayWorksWithMemoryCompactionPolicy</a>(org.apache.hadoop.hbase.MemoryCompactionPolicy&nbsp;policy)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.html#line.90">testReplayWorksWithMemoryCompactionPolicy</a>(org.apache.hadoop.hbase.MemoryCompactionPolicy&nbsp;policy)
                                                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -316,7 +316,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>verifyAllEditsMadeItIn</h4>
-<pre>private&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.html#line.168">verifyAllEditsMadeItIn</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
+<pre>private&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.html#line.170">verifyAllEditsMadeItIn</a>(org.apache.hadoop.fs.FileSystem&nbsp;fs,
                                    org.apache.hadoop.conf.Configuration&nbsp;conf,
                                    org.apache.hadoop.fs.Path&nbsp;edits,
                                    org.apache.hadoop.hbase.regionserver.HRegion&nbsp;region)

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/testdevapidocs/src-html/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.html
index 33fde76..d9c31be 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.html
@@ -120,90 +120,91 @@
 <span class="sourceLineNo">112</span>    if(includeTags) {<a name="line.112"></a>
 <span class="sourceLineNo">113</span>      testUtil.getConfiguration().setInt(HFile.FORMAT_VERSION_KEY, 3);<a name="line.113"></a>
 <span class="sourceLineNo">114</span>    }<a name="line.114"></a>
-<span class="sourceLineNo">115</span>    LruBlockCache cache =<a name="line.115"></a>
-<span class="sourceLineNo">116</span>      (LruBlockCache)new CacheConfig(testUtil.getConfiguration()).getBlockCache();<a name="line.116"></a>
-<span class="sourceLineNo">117</span>    cache.clearCache();<a name="line.117"></a>
-<span class="sourceLineNo">118</span>    // Need to disable default row bloom filter for this test to pass.<a name="line.118"></a>
-<span class="sourceLineNo">119</span>    HColumnDescriptor hcd = (new HColumnDescriptor(CF_NAME)).setMaxVersions(MAX_VERSIONS).<a name="line.119"></a>
-<span class="sourceLineNo">120</span>        setDataBlockEncoding(encoding).<a name="line.120"></a>
-<span class="sourceLineNo">121</span>        setBlocksize(BLOCK_SIZE).<a name="line.121"></a>
-<span class="sourceLineNo">122</span>        setBloomFilterType(BloomType.NONE).<a name="line.122"></a>
-<span class="sourceLineNo">123</span>        setCompressTags(compressTags);<a name="line.123"></a>
-<span class="sourceLineNo">124</span>    HRegion region = testUtil.createTestRegion(TABLE_NAME, hcd);<a name="line.124"></a>
-<span class="sourceLineNo">125</span><a name="line.125"></a>
-<span class="sourceLineNo">126</span>    //write the data, but leave some in the memstore<a name="line.126"></a>
-<span class="sourceLineNo">127</span>    doPuts(region);<a name="line.127"></a>
-<span class="sourceLineNo">128</span><a name="line.128"></a>
-<span class="sourceLineNo">129</span>    //verify correctness when memstore contains data<a name="line.129"></a>
-<span class="sourceLineNo">130</span>    doGets(region);<a name="line.130"></a>
-<span class="sourceLineNo">131</span><a name="line.131"></a>
-<span class="sourceLineNo">132</span>    //verify correctness again after compacting<a name="line.132"></a>
-<span class="sourceLineNo">133</span>    region.compact(false);<a name="line.133"></a>
-<span class="sourceLineNo">134</span>    doGets(region);<a name="line.134"></a>
-<span class="sourceLineNo">135</span><a name="line.135"></a>
-<span class="sourceLineNo">136</span>    Map&lt;DataBlockEncoding, Integer&gt; encodingCounts = cache.getEncodingCountsForTest();<a name="line.136"></a>
-<span class="sourceLineNo">137</span><a name="line.137"></a>
-<span class="sourceLineNo">138</span>    // Ensure that compactions don't pollute the cache with unencoded blocks<a name="line.138"></a>
-<span class="sourceLineNo">139</span>    // in case of in-cache-only encoding.<a name="line.139"></a>
-<span class="sourceLineNo">140</span>    System.err.println("encodingCounts=" + encodingCounts);<a name="line.140"></a>
-<span class="sourceLineNo">141</span>    assertEquals(1, encodingCounts.size());<a name="line.141"></a>
-<span class="sourceLineNo">142</span>    DataBlockEncoding encodingInCache = encodingCounts.keySet().iterator().next();<a name="line.142"></a>
-<span class="sourceLineNo">143</span>    assertEquals(encoding, encodingInCache);<a name="line.143"></a>
-<span class="sourceLineNo">144</span>    assertTrue(encodingCounts.get(encodingInCache) &gt; 0);<a name="line.144"></a>
-<span class="sourceLineNo">145</span>  }<a name="line.145"></a>
-<span class="sourceLineNo">146</span><a name="line.146"></a>
+<span class="sourceLineNo">115</span>    CacheConfig.instantiateBlockCache(testUtil.getConfiguration());<a name="line.115"></a>
+<span class="sourceLineNo">116</span>    LruBlockCache cache =<a name="line.116"></a>
+<span class="sourceLineNo">117</span>      (LruBlockCache)new CacheConfig(testUtil.getConfiguration()).getBlockCache();<a name="line.117"></a>
+<span class="sourceLineNo">118</span>    cache.clearCache();<a name="line.118"></a>
+<span class="sourceLineNo">119</span>    // Need to disable default row bloom filter for this test to pass.<a name="line.119"></a>
+<span class="sourceLineNo">120</span>    HColumnDescriptor hcd = (new HColumnDescriptor(CF_NAME)).setMaxVersions(MAX_VERSIONS).<a name="line.120"></a>
+<span class="sourceLineNo">121</span>        setDataBlockEncoding(encoding).<a name="line.121"></a>
+<span class="sourceLineNo">122</span>        setBlocksize(BLOCK_SIZE).<a name="line.122"></a>
+<span class="sourceLineNo">123</span>        setBloomFilterType(BloomType.NONE).<a name="line.123"></a>
+<span class="sourceLineNo">124</span>        setCompressTags(compressTags);<a name="line.124"></a>
+<span class="sourceLineNo">125</span>    HRegion region = testUtil.createTestRegion(TABLE_NAME, hcd);<a name="line.125"></a>
+<span class="sourceLineNo">126</span><a name="line.126"></a>
+<span class="sourceLineNo">127</span>    //write the data, but leave some in the memstore<a name="line.127"></a>
+<span class="sourceLineNo">128</span>    doPuts(region);<a name="line.128"></a>
+<span class="sourceLineNo">129</span><a name="line.129"></a>
+<span class="sourceLineNo">130</span>    //verify correctness when memstore contains data<a name="line.130"></a>
+<span class="sourceLineNo">131</span>    doGets(region);<a name="line.131"></a>
+<span class="sourceLineNo">132</span><a name="line.132"></a>
+<span class="sourceLineNo">133</span>    //verify correctness again after compacting<a name="line.133"></a>
+<span class="sourceLineNo">134</span>    region.compact(false);<a name="line.134"></a>
+<span class="sourceLineNo">135</span>    doGets(region);<a name="line.135"></a>
+<span class="sourceLineNo">136</span><a name="line.136"></a>
+<span class="sourceLineNo">137</span>    Map&lt;DataBlockEncoding, Integer&gt; encodingCounts = cache.getEncodingCountsForTest();<a name="line.137"></a>
+<span class="sourceLineNo">138</span><a name="line.138"></a>
+<span class="sourceLineNo">139</span>    // Ensure that compactions don't pollute the cache with unencoded blocks<a name="line.139"></a>
+<span class="sourceLineNo">140</span>    // in case of in-cache-only encoding.<a name="line.140"></a>
+<span class="sourceLineNo">141</span>    System.err.println("encodingCounts=" + encodingCounts);<a name="line.141"></a>
+<span class="sourceLineNo">142</span>    assertEquals(1, encodingCounts.size());<a name="line.142"></a>
+<span class="sourceLineNo">143</span>    DataBlockEncoding encodingInCache = encodingCounts.keySet().iterator().next();<a name="line.143"></a>
+<span class="sourceLineNo">144</span>    assertEquals(encoding, encodingInCache);<a name="line.144"></a>
+<span class="sourceLineNo">145</span>    assertTrue(encodingCounts.get(encodingInCache) &gt; 0);<a name="line.145"></a>
+<span class="sourceLineNo">146</span>  }<a name="line.146"></a>
 <span class="sourceLineNo">147</span><a name="line.147"></a>
-<span class="sourceLineNo">148</span>  private void doPuts(HRegion region) throws IOException{<a name="line.148"></a>
-<span class="sourceLineNo">149</span>    LoadTestKVGenerator dataGenerator = new LoadTestKVGenerator(MIN_VALUE_SIZE, MAX_VALUE_SIZE);<a name="line.149"></a>
-<span class="sourceLineNo">150</span>     for (int i = 0; i &lt; NUM_ROWS; ++i) {<a name="line.150"></a>
-<span class="sourceLineNo">151</span>      byte[] key = LoadTestKVGenerator.md5PrefixedKey(i).getBytes();<a name="line.151"></a>
-<span class="sourceLineNo">152</span>      for (int j = 0; j &lt; NUM_COLS_PER_ROW; ++j) {<a name="line.152"></a>
-<span class="sourceLineNo">153</span>        Put put = new Put(key);<a name="line.153"></a>
-<span class="sourceLineNo">154</span>        put.setDurability(Durability.ASYNC_WAL);<a name="line.154"></a>
-<span class="sourceLineNo">155</span>        byte[] col = Bytes.toBytes(String.valueOf(j));<a name="line.155"></a>
-<span class="sourceLineNo">156</span>        byte[] value = dataGenerator.generateRandomSizeValue(key, col);<a name="line.156"></a>
-<span class="sourceLineNo">157</span>        if (includeTags) {<a name="line.157"></a>
-<span class="sourceLineNo">158</span>          Tag[] tag = new Tag[1];<a name="line.158"></a>
-<span class="sourceLineNo">159</span>          tag[0] = new ArrayBackedTag((byte) 1, "Visibility");<a name="line.159"></a>
-<span class="sourceLineNo">160</span>          KeyValue kv = new KeyValue(key, CF_BYTES, col, HConstants.LATEST_TIMESTAMP, value, tag);<a name="line.160"></a>
-<span class="sourceLineNo">161</span>          put.add(kv);<a name="line.161"></a>
-<span class="sourceLineNo">162</span>        } else {<a name="line.162"></a>
-<span class="sourceLineNo">163</span>          put.addColumn(CF_BYTES, col, value);<a name="line.163"></a>
-<span class="sourceLineNo">164</span>        }<a name="line.164"></a>
-<span class="sourceLineNo">165</span>        if(VERBOSE){<a name="line.165"></a>
-<span class="sourceLineNo">166</span>          KeyValue kvPut = new KeyValue(key, CF_BYTES, col, value);<a name="line.166"></a>
-<span class="sourceLineNo">167</span>          System.err.println(Strings.padFront(i+"", ' ', 4)+" "+kvPut);<a name="line.167"></a>
-<span class="sourceLineNo">168</span>        }<a name="line.168"></a>
-<span class="sourceLineNo">169</span>        region.put(put);<a name="line.169"></a>
-<span class="sourceLineNo">170</span>      }<a name="line.170"></a>
-<span class="sourceLineNo">171</span>      if (i % NUM_ROWS_PER_FLUSH == 0) {<a name="line.171"></a>
-<span class="sourceLineNo">172</span>        region.flush(true);<a name="line.172"></a>
-<span class="sourceLineNo">173</span>      }<a name="line.173"></a>
-<span class="sourceLineNo">174</span>    }<a name="line.174"></a>
-<span class="sourceLineNo">175</span>  }<a name="line.175"></a>
-<span class="sourceLineNo">176</span><a name="line.176"></a>
+<span class="sourceLineNo">148</span><a name="line.148"></a>
+<span class="sourceLineNo">149</span>  private void doPuts(HRegion region) throws IOException{<a name="line.149"></a>
+<span class="sourceLineNo">150</span>    LoadTestKVGenerator dataGenerator = new LoadTestKVGenerator(MIN_VALUE_SIZE, MAX_VALUE_SIZE);<a name="line.150"></a>
+<span class="sourceLineNo">151</span>     for (int i = 0; i &lt; NUM_ROWS; ++i) {<a name="line.151"></a>
+<span class="sourceLineNo">152</span>      byte[] key = LoadTestKVGenerator.md5PrefixedKey(i).getBytes();<a name="line.152"></a>
+<span class="sourceLineNo">153</span>      for (int j = 0; j &lt; NUM_COLS_PER_ROW; ++j) {<a name="line.153"></a>
+<span class="sourceLineNo">154</span>        Put put = new Put(key);<a name="line.154"></a>
+<span class="sourceLineNo">155</span>        put.setDurability(Durability.ASYNC_WAL);<a name="line.155"></a>
+<span class="sourceLineNo">156</span>        byte[] col = Bytes.toBytes(String.valueOf(j));<a name="line.156"></a>
+<span class="sourceLineNo">157</span>        byte[] value = dataGenerator.generateRandomSizeValue(key, col);<a name="line.157"></a>
+<span class="sourceLineNo">158</span>        if (includeTags) {<a name="line.158"></a>
+<span class="sourceLineNo">159</span>          Tag[] tag = new Tag[1];<a name="line.159"></a>
+<span class="sourceLineNo">160</span>          tag[0] = new ArrayBackedTag((byte) 1, "Visibility");<a name="line.160"></a>
+<span class="sourceLineNo">161</span>          KeyValue kv = new KeyValue(key, CF_BYTES, col, HConstants.LATEST_TIMESTAMP, value, tag);<a name="line.161"></a>
+<span class="sourceLineNo">162</span>          put.add(kv);<a name="line.162"></a>
+<span class="sourceLineNo">163</span>        } else {<a name="line.163"></a>
+<span class="sourceLineNo">164</span>          put.addColumn(CF_BYTES, col, value);<a name="line.164"></a>
+<span class="sourceLineNo">165</span>        }<a name="line.165"></a>
+<span class="sourceLineNo">166</span>        if(VERBOSE){<a name="line.166"></a>
+<span class="sourceLineNo">167</span>          KeyValue kvPut = new KeyValue(key, CF_BYTES, col, value);<a name="line.167"></a>
+<span class="sourceLineNo">168</span>          System.err.println(Strings.padFront(i+"", ' ', 4)+" "+kvPut);<a name="line.168"></a>
+<span class="sourceLineNo">169</span>        }<a name="line.169"></a>
+<span class="sourceLineNo">170</span>        region.put(put);<a name="line.170"></a>
+<span class="sourceLineNo">171</span>      }<a name="line.171"></a>
+<span class="sourceLineNo">172</span>      if (i % NUM_ROWS_PER_FLUSH == 0) {<a name="line.172"></a>
+<span class="sourceLineNo">173</span>        region.flush(true);<a name="line.173"></a>
+<span class="sourceLineNo">174</span>      }<a name="line.174"></a>
+<span class="sourceLineNo">175</span>    }<a name="line.175"></a>
+<span class="sourceLineNo">176</span>  }<a name="line.176"></a>
 <span class="sourceLineNo">177</span><a name="line.177"></a>
-<span class="sourceLineNo">178</span>  private void doGets(Region region) throws IOException{<a name="line.178"></a>
-<span class="sourceLineNo">179</span>    for (int i = 0; i &lt; NUM_ROWS; ++i) {<a name="line.179"></a>
-<span class="sourceLineNo">180</span>      final byte[] rowKey = LoadTestKVGenerator.md5PrefixedKey(i).getBytes();<a name="line.180"></a>
-<span class="sourceLineNo">181</span>      for (int j = 0; j &lt; NUM_COLS_PER_ROW; ++j) {<a name="line.181"></a>
-<span class="sourceLineNo">182</span>        final String qualStr = String.valueOf(j);<a name="line.182"></a>
-<span class="sourceLineNo">183</span>        if (VERBOSE) {<a name="line.183"></a>
-<span class="sourceLineNo">184</span>          System.err.println("Reading row " + i + ", column " + j + " " + Bytes.toString(rowKey)+"/"<a name="line.184"></a>
-<span class="sourceLineNo">185</span>              +qualStr);<a name="line.185"></a>
-<span class="sourceLineNo">186</span>        }<a name="line.186"></a>
-<span class="sourceLineNo">187</span>        final byte[] qualBytes = Bytes.toBytes(qualStr);<a name="line.187"></a>
-<span class="sourceLineNo">188</span>        Get get = new Get(rowKey);<a name="line.188"></a>
-<span class="sourceLineNo">189</span>        get.addColumn(CF_BYTES, qualBytes);<a name="line.189"></a>
-<span class="sourceLineNo">190</span>        Result result = region.get(get);<a name="line.190"></a>
-<span class="sourceLineNo">191</span>        assertEquals(1, result.size());<a name="line.191"></a>
-<span class="sourceLineNo">192</span>        byte[] value = result.getValue(CF_BYTES, qualBytes);<a name="line.192"></a>
-<span class="sourceLineNo">193</span>        assertTrue(LoadTestKVGenerator.verify(value, rowKey, qualBytes));<a name="line.193"></a>
-<span class="sourceLineNo">194</span>      }<a name="line.194"></a>
-<span class="sourceLineNo">195</span>    }<a name="line.195"></a>
-<span class="sourceLineNo">196</span>  }<a name="line.196"></a>
-<span class="sourceLineNo">197</span><a name="line.197"></a>
-<span class="sourceLineNo">198</span>}<a name="line.198"></a>
+<span class="sourceLineNo">178</span><a name="line.178"></a>
+<span class="sourceLineNo">179</span>  private void doGets(Region region) throws IOException{<a name="line.179"></a>
+<span class="sourceLineNo">180</span>    for (int i = 0; i &lt; NUM_ROWS; ++i) {<a name="line.180"></a>
+<span class="sourceLineNo">181</span>      final byte[] rowKey = LoadTestKVGenerator.md5PrefixedKey(i).getBytes();<a name="line.181"></a>
+<span class="sourceLineNo">182</span>      for (int j = 0; j &lt; NUM_COLS_PER_ROW; ++j) {<a name="line.182"></a>
+<span class="sourceLineNo">183</span>        final String qualStr = String.valueOf(j);<a name="line.183"></a>
+<span class="sourceLineNo">184</span>        if (VERBOSE) {<a name="line.184"></a>
+<span class="sourceLineNo">185</span>          System.err.println("Reading row " + i + ", column " + j + " " + Bytes.toString(rowKey)+"/"<a name="line.185"></a>
+<span class="sourceLineNo">186</span>              +qualStr);<a name="line.186"></a>
+<span class="sourceLineNo">187</span>        }<a name="line.187"></a>
+<span class="sourceLineNo">188</span>        final byte[] qualBytes = Bytes.toBytes(qualStr);<a name="line.188"></a>
+<span class="sourceLineNo">189</span>        Get get = new Get(rowKey);<a name="line.189"></a>
+<span class="sourceLineNo">190</span>        get.addColumn(CF_BYTES, qualBytes);<a name="line.190"></a>
+<span class="sourceLineNo">191</span>        Result result = region.get(get);<a name="line.191"></a>
+<span class="sourceLineNo">192</span>        assertEquals(1, result.size());<a name="line.192"></a>
+<span class="sourceLineNo">193</span>        byte[] value = result.getValue(CF_BYTES, qualBytes);<a name="line.193"></a>
+<span class="sourceLineNo">194</span>        assertTrue(LoadTestKVGenerator.verify(value, rowKey, qualBytes));<a name="line.194"></a>
+<span class="sourceLineNo">195</span>      }<a name="line.195"></a>
+<span class="sourceLineNo">196</span>    }<a name="line.196"></a>
+<span class="sourceLineNo">197</span>  }<a name="line.197"></a>
+<span class="sourceLineNo">198</span><a name="line.198"></a>
+<span class="sourceLineNo">199</span>}<a name="line.199"></a>
 
 
 

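Note on the TestEncodedSeekers hunk above: the only functional change is the insertion of CacheConfig.instantiateBlockCache(testUtil.getConfiguration()) ahead of the CacheConfig lookup, which shifts every later source line down by one. The following is a minimal, hypothetical sketch of that call order; it reuses only the calls visible in the diff (instantiateBlockCache, getBlockCache, clearCache), while the class and method names are invented for illustration and are not part of the committed test.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseTestingUtility;
    import org.apache.hadoop.hbase.io.hfile.CacheConfig;
    import org.apache.hadoop.hbase.io.hfile.LruBlockCache;

    public class EncodedSeekersCacheSetupSketch {
      // Returns a cleared LRU block cache for a test, using the ordering the
      // diff introduces: instantiate the global cache first, then read it
      // back through a CacheConfig built from the same Configuration.
      public static LruBlockCache newClearedBlockCache(HBaseTestingUtility testUtil) {
        Configuration conf = testUtil.getConfiguration();
        CacheConfig.instantiateBlockCache(conf);   // new step added in this revision
        LruBlockCache cache = (LruBlockCache) new CacheConfig(conf).getBlockCache();
        cache.clearCache();                        // start each run from an empty cache
        return cache;
      }
    }

The cast to LruBlockCache matches the default (non bucket cache) configuration this particular test runs with, as shown in the hunk.
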
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.html
index 566ee51..8e2e91a 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.html
@@ -27,90 +27,90 @@
 <span class="sourceLineNo">019</span><a name="line.19"></a>
 <span class="sourceLineNo">020</span>import static org.junit.Assert.*;<a name="line.20"></a>
 <span class="sourceLineNo">021</span><a name="line.21"></a>
-<span class="sourceLineNo">022</span>import com.fasterxml.jackson.core.JsonGenerationException;<a name="line.22"></a>
-<span class="sourceLineNo">023</span>import com.fasterxml.jackson.databind.JsonMappingException;<a name="line.23"></a>
-<span class="sourceLineNo">024</span>import java.io.IOException;<a name="line.24"></a>
-<span class="sourceLineNo">025</span>import java.util.Map;<a name="line.25"></a>
-<span class="sourceLineNo">026</span>import java.util.NavigableSet;<a name="line.26"></a>
-<span class="sourceLineNo">027</span>import java.util.Objects;<a name="line.27"></a>
-<span class="sourceLineNo">028</span>import org.apache.hadoop.conf.Configuration;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.HBaseClassTestRule;<a name="line.29"></a>
-<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.HConstants;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.io.hfile.TestCacheConfig.DataCacheEntry;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.io.hfile.TestCacheConfig.IndexCacheEntry;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import org.apache.hadoop.hbase.testclassification.IOTests;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import org.apache.hadoop.hbase.testclassification.SmallTests;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.junit.After;<a name="line.36"></a>
-<span class="sourceLineNo">037</span>import org.junit.Before;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import org.junit.ClassRule;<a name="line.38"></a>
-<span class="sourceLineNo">039</span>import org.junit.Test;<a name="line.39"></a>
-<span class="sourceLineNo">040</span>import org.junit.experimental.categories.Category;<a name="line.40"></a>
-<span class="sourceLineNo">041</span>import org.slf4j.Logger;<a name="line.41"></a>
-<span class="sourceLineNo">042</span>import org.slf4j.LoggerFactory;<a name="line.42"></a>
-<span class="sourceLineNo">043</span><a name="line.43"></a>
-<span class="sourceLineNo">044</span>@Category({IOTests.class, SmallTests.class})<a name="line.44"></a>
-<span class="sourceLineNo">045</span>public class TestBlockCacheReporting {<a name="line.45"></a>
-<span class="sourceLineNo">046</span><a name="line.46"></a>
-<span class="sourceLineNo">047</span>  @ClassRule<a name="line.47"></a>
-<span class="sourceLineNo">048</span>  public static final HBaseClassTestRule CLASS_RULE =<a name="line.48"></a>
-<span class="sourceLineNo">049</span>      HBaseClassTestRule.forClass(TestBlockCacheReporting.class);<a name="line.49"></a>
-<span class="sourceLineNo">050</span><a name="line.50"></a>
-<span class="sourceLineNo">051</span>  private static final Logger LOG = LoggerFactory.getLogger(TestBlockCacheReporting.class);<a name="line.51"></a>
-<span class="sourceLineNo">052</span>  private Configuration conf;<a name="line.52"></a>
-<span class="sourceLineNo">053</span><a name="line.53"></a>
-<span class="sourceLineNo">054</span>  @Before<a name="line.54"></a>
-<span class="sourceLineNo">055</span>  public void setUp() throws Exception {<a name="line.55"></a>
-<span class="sourceLineNo">056</span>    CacheConfig.clearGlobalInstances();<a name="line.56"></a>
-<span class="sourceLineNo">057</span>    this.conf = HBaseConfiguration.create();<a name="line.57"></a>
-<span class="sourceLineNo">058</span>  }<a name="line.58"></a>
-<span class="sourceLineNo">059</span><a name="line.59"></a>
-<span class="sourceLineNo">060</span>  @After<a name="line.60"></a>
-<span class="sourceLineNo">061</span>  public void tearDown() throws Exception {<a name="line.61"></a>
-<span class="sourceLineNo">062</span>    // Let go of current block cache.<a name="line.62"></a>
-<span class="sourceLineNo">063</span>    CacheConfig.clearGlobalInstances();<a name="line.63"></a>
-<span class="sourceLineNo">064</span>  }<a name="line.64"></a>
-<span class="sourceLineNo">065</span><a name="line.65"></a>
-<span class="sourceLineNo">066</span>  private void addDataAndHits(final BlockCache bc, final int count) {<a name="line.66"></a>
-<span class="sourceLineNo">067</span>    Cacheable dce = new DataCacheEntry();<a name="line.67"></a>
-<span class="sourceLineNo">068</span>    Cacheable ice = new IndexCacheEntry();<a name="line.68"></a>
-<span class="sourceLineNo">069</span>    for (int i = 0; i &lt; count; i++) {<a name="line.69"></a>
-<span class="sourceLineNo">070</span>      BlockCacheKey bckd = new BlockCacheKey("f", i);<a name="line.70"></a>
-<span class="sourceLineNo">071</span>      BlockCacheKey bcki = new BlockCacheKey("f", i + count);<a name="line.71"></a>
-<span class="sourceLineNo">072</span>      bc.getBlock(bckd, true, false, true);<a name="line.72"></a>
-<span class="sourceLineNo">073</span>      bc.cacheBlock(bckd, dce);<a name="line.73"></a>
-<span class="sourceLineNo">074</span>      bc.cacheBlock(bcki, ice);<a name="line.74"></a>
-<span class="sourceLineNo">075</span>      bc.getBlock(bckd, true, false, true);<a name="line.75"></a>
-<span class="sourceLineNo">076</span>      bc.getBlock(bcki, true, false, true);<a name="line.76"></a>
-<span class="sourceLineNo">077</span>    }<a name="line.77"></a>
-<span class="sourceLineNo">078</span>    assertEquals(2 * count /*Data and Index blocks*/, bc.getStats().getHitCount());<a name="line.78"></a>
-<span class="sourceLineNo">079</span>    BlockCacheKey bckd = new BlockCacheKey("f", 0);<a name="line.79"></a>
-<span class="sourceLineNo">080</span>    BlockCacheKey bcki = new BlockCacheKey("f", 0 + count);<a name="line.80"></a>
-<span class="sourceLineNo">081</span>    bc.evictBlock(bckd);<a name="line.81"></a>
-<span class="sourceLineNo">082</span>    bc.evictBlock(bcki);<a name="line.82"></a>
-<span class="sourceLineNo">083</span>    bc.getStats().getEvictedCount();<a name="line.83"></a>
-<span class="sourceLineNo">084</span>  }<a name="line.84"></a>
-<span class="sourceLineNo">085</span><a name="line.85"></a>
-<span class="sourceLineNo">086</span>  @Test<a name="line.86"></a>
-<span class="sourceLineNo">087</span>  public void testBucketCache() throws JsonGenerationException, JsonMappingException, IOException {<a name="line.87"></a>
-<span class="sourceLineNo">088</span>    this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");<a name="line.88"></a>
-<span class="sourceLineNo">089</span>    this.conf.setInt(HConstants.BUCKET_CACHE_SIZE_KEY, 100);<a name="line.89"></a>
-<span class="sourceLineNo">090</span>    CacheConfig cc = new CacheConfig(this.conf);<a name="line.90"></a>
-<span class="sourceLineNo">091</span>    assertTrue(cc.getBlockCache() instanceof CombinedBlockCache);<a name="line.91"></a>
-<span class="sourceLineNo">092</span>    logPerBlock(cc.getBlockCache());<a name="line.92"></a>
-<span class="sourceLineNo">093</span>    final int count = 3;<a name="line.93"></a>
-<span class="sourceLineNo">094</span>    addDataAndHits(cc.getBlockCache(), count);<a name="line.94"></a>
-<span class="sourceLineNo">095</span>    // The below has no asserts.  It is just exercising toString and toJSON code.<a name="line.95"></a>
-<span class="sourceLineNo">096</span>    LOG.info(Objects.toString(cc.getBlockCache().getStats()));<a name="line.96"></a>
-<span class="sourceLineNo">097</span>    BlockCacheUtil.CachedBlocksByFile cbsbf = logPerBlock(cc.getBlockCache());<a name="line.97"></a>
-<span class="sourceLineNo">098</span>    LOG.info(Objects.toString(cbsbf));<a name="line.98"></a>
-<span class="sourceLineNo">099</span>    logPerFile(cbsbf);<a name="line.99"></a>
-<span class="sourceLineNo">100</span>    bucketCacheReport(cc.getBlockCache());<a name="line.100"></a>
-<span class="sourceLineNo">101</span>    LOG.info(BlockCacheUtil.toJSON(cbsbf));<a name="line.101"></a>
-<span class="sourceLineNo">102</span>  }<a name="line.102"></a>
-<span class="sourceLineNo">103</span><a name="line.103"></a>
-<span class="sourceLineNo">104</span>  @Test<a name="line.104"></a>
-<span class="sourceLineNo">105</span>  public void testLruBlockCache() throws JsonGenerationException, JsonMappingException, IOException {<a name="line.105"></a>
+<span class="sourceLineNo">022</span>import java.io.IOException;<a name="line.22"></a>
+<span class="sourceLineNo">023</span>import java.util.Map;<a name="line.23"></a>
+<span class="sourceLineNo">024</span>import java.util.NavigableSet;<a name="line.24"></a>
+<span class="sourceLineNo">025</span>import java.util.Objects;<a name="line.25"></a>
+<span class="sourceLineNo">026</span>import org.apache.hadoop.conf.Configuration;<a name="line.26"></a>
+<span class="sourceLineNo">027</span>import org.apache.hadoop.hbase.HBaseClassTestRule;<a name="line.27"></a>
+<span class="sourceLineNo">028</span>import org.apache.hadoop.hbase.HBaseConfiguration;<a name="line.28"></a>
+<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.HConstants;<a name="line.29"></a>
+<span class="sourceLineNo">030</span>import org.apache.hadoop.hbase.io.hfile.TestCacheConfig.DataCacheEntry;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.io.hfile.TestCacheConfig.IndexCacheEntry;<a name="line.31"></a>
+<span class="sourceLineNo">032</span>import org.apache.hadoop.hbase.testclassification.IOTests;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import org.apache.hadoop.hbase.testclassification.SmallTests;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import org.junit.After;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import org.junit.Before;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import org.junit.ClassRule;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import org.junit.Test;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.junit.experimental.categories.Category;<a name="line.38"></a>
+<span class="sourceLineNo">039</span>import org.slf4j.Logger;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import org.slf4j.LoggerFactory;<a name="line.40"></a>
+<span class="sourceLineNo">041</span><a name="line.41"></a>
+<span class="sourceLineNo">042</span>@Category({IOTests.class, SmallTests.class})<a name="line.42"></a>
+<span class="sourceLineNo">043</span>public class TestBlockCacheReporting {<a name="line.43"></a>
+<span class="sourceLineNo">044</span><a name="line.44"></a>
+<span class="sourceLineNo">045</span>  @ClassRule<a name="line.45"></a>
+<span class="sourceLineNo">046</span>  public static final HBaseClassTestRule CLASS_RULE =<a name="line.46"></a>
+<span class="sourceLineNo">047</span>      HBaseClassTestRule.forClass(TestBlockCacheReporting.class);<a name="line.47"></a>
+<span class="sourceLineNo">048</span><a name="line.48"></a>
+<span class="sourceLineNo">049</span>  private static final Logger LOG = LoggerFactory.getLogger(TestBlockCacheReporting.class);<a name="line.49"></a>
+<span class="sourceLineNo">050</span>  private Configuration conf;<a name="line.50"></a>
+<span class="sourceLineNo">051</span><a name="line.51"></a>
+<span class="sourceLineNo">052</span>  @Before<a name="line.52"></a>
+<span class="sourceLineNo">053</span>  public void setUp() throws Exception {<a name="line.53"></a>
+<span class="sourceLineNo">054</span>    CacheConfig.clearGlobalInstances();<a name="line.54"></a>
+<span class="sourceLineNo">055</span>    this.conf = HBaseConfiguration.create();<a name="line.55"></a>
+<span class="sourceLineNo">056</span>  }<a name="line.56"></a>
+<span class="sourceLineNo">057</span><a name="line.57"></a>
+<span class="sourceLineNo">058</span>  @After<a name="line.58"></a>
+<span class="sourceLineNo">059</span>  public void tearDown() throws Exception {<a name="line.59"></a>
+<span class="sourceLineNo">060</span>    // Let go of current block cache.<a name="line.60"></a>
+<span class="sourceLineNo">061</span>    CacheConfig.clearGlobalInstances();<a name="line.61"></a>
+<span class="sourceLineNo">062</span>  }<a name="line.62"></a>
+<span class="sourceLineNo">063</span><a name="line.63"></a>
+<span class="sourceLineNo">064</span>  private void addDataAndHits(final BlockCache bc, final int count) {<a name="line.64"></a>
+<span class="sourceLineNo">065</span>    Cacheable dce = new DataCacheEntry();<a name="line.65"></a>
+<span class="sourceLineNo">066</span>    Cacheable ice = new IndexCacheEntry();<a name="line.66"></a>
+<span class="sourceLineNo">067</span>    for (int i = 0; i &lt; count; i++) {<a name="line.67"></a>
+<span class="sourceLineNo">068</span>      BlockCacheKey bckd = new BlockCacheKey("f", i);<a name="line.68"></a>
+<span class="sourceLineNo">069</span>      BlockCacheKey bcki = new BlockCacheKey("f", i + count);<a name="line.69"></a>
+<span class="sourceLineNo">070</span>      bc.getBlock(bckd, true, false, true);<a name="line.70"></a>
+<span class="sourceLineNo">071</span>      bc.cacheBlock(bckd, dce);<a name="line.71"></a>
+<span class="sourceLineNo">072</span>      bc.cacheBlock(bcki, ice);<a name="line.72"></a>
+<span class="sourceLineNo">073</span>      bc.getBlock(bckd, true, false, true);<a name="line.73"></a>
+<span class="sourceLineNo">074</span>      bc.getBlock(bcki, true, false, true);<a name="line.74"></a>
+<span class="sourceLineNo">075</span>    }<a name="line.75"></a>
+<span class="sourceLineNo">076</span>    assertEquals(2 * count /*Data and Index blocks*/, bc.getStats().getHitCount());<a name="line.76"></a>
+<span class="sourceLineNo">077</span>    BlockCacheKey bckd = new BlockCacheKey("f", 0);<a name="line.77"></a>
+<span class="sourceLineNo">078</span>    BlockCacheKey bcki = new BlockCacheKey("f", 0 + count);<a name="line.78"></a>
+<span class="sourceLineNo">079</span>    bc.evictBlock(bckd);<a name="line.79"></a>
+<span class="sourceLineNo">080</span>    bc.evictBlock(bcki);<a name="line.80"></a>
+<span class="sourceLineNo">081</span>    bc.getStats().getEvictedCount();<a name="line.81"></a>
+<span class="sourceLineNo">082</span>  }<a name="line.82"></a>
+<span class="sourceLineNo">083</span><a name="line.83"></a>
+<span class="sourceLineNo">084</span>  @Test<a name="line.84"></a>
+<span class="sourceLineNo">085</span>  public void testBucketCache() throws IOException {<a name="line.85"></a>
+<span class="sourceLineNo">086</span>    this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");<a name="line.86"></a>
+<span class="sourceLineNo">087</span>    this.conf.setInt(HConstants.BUCKET_CACHE_SIZE_KEY, 100);<a name="line.87"></a>
+<span class="sourceLineNo">088</span>    CacheConfig.instantiateBlockCache(this.conf);<a name="line.88"></a>
+<span class="sourceLineNo">089</span>    CacheConfig cc = new CacheConfig(this.conf);<a name="line.89"></a>
+<span class="sourceLineNo">090</span>    assertTrue(cc.getBlockCache() instanceof CombinedBlockCache);<a name="line.90"></a>
+<span class="sourceLineNo">091</span>    logPerBlock(cc.getBlockCache());<a name="line.91"></a>
+<span class="sourceLineNo">092</span>    final int count = 3;<a name="line.92"></a>
+<span class="sourceLineNo">093</span>    addDataAndHits(cc.getBlockCache(), count);<a name="line.93"></a>
+<span class="sourceLineNo">094</span>    // The below has no asserts.  It is just exercising toString and toJSON code.<a name="line.94"></a>
+<span class="sourceLineNo">095</span>    LOG.info(Objects.toString(cc.getBlockCache().getStats()));<a name="line.95"></a>
+<span class="sourceLineNo">096</span>    BlockCacheUtil.CachedBlocksByFile cbsbf = logPerBlock(cc.getBlockCache());<a name="line.96"></a>
+<span class="sourceLineNo">097</span>    LOG.info(Objects.toString(cbsbf));<a name="line.97"></a>
+<span class="sourceLineNo">098</span>    logPerFile(cbsbf);<a name="line.98"></a>
+<span class="sourceLineNo">099</span>    bucketCacheReport(cc.getBlockCache());<a name="line.99"></a>
+<span class="sourceLineNo">100</span>    LOG.info(BlockCacheUtil.toJSON(cbsbf));<a name="line.100"></a>
+<span class="sourceLineNo">101</span>  }<a name="line.101"></a>
+<span class="sourceLineNo">102</span><a name="line.102"></a>
+<span class="sourceLineNo">103</span>  @Test<a name="line.103"></a>
+<span class="sourceLineNo">104</span>  public void testLruBlockCache() throws IOException {<a name="line.104"></a>
+<span class="sourceLineNo">105</span>    CacheConfig.instantiateBlockCache(this.conf);<a name="line.105"></a>
 <span class="sourceLineNo">106</span>    CacheConfig cc = new CacheConfig(this.conf);<a name="line.106"></a>
 <span class="sourceLineNo">107</span>    assertTrue(cc.isBlockCacheEnabled());<a name="line.107"></a>
 <span class="sourceLineNo">108</span>    assertTrue(CacheConfig.DEFAULT_IN_MEMORY == cc.isInMemory());<a name="line.108"></a>
@@ -139,40 +139,38 @@
 <span class="sourceLineNo">131</span>    }<a name="line.131"></a>
 <span class="sourceLineNo">132</span>  }<a name="line.132"></a>
 <span class="sourceLineNo">133</span><a name="line.133"></a>
-<span class="sourceLineNo">134</span>  private void logPerFile(final BlockCacheUtil.CachedBlocksByFile cbsbf)<a name="line.134"></a>
-<span class="sourceLineNo">135</span>  throws JsonGenerationException, JsonMappingException, IOException {<a name="line.135"></a>
-<span class="sourceLineNo">136</span>    for (Map.Entry&lt;String, NavigableSet&lt;CachedBlock&gt;&gt; e:<a name="line.136"></a>
-<span class="sourceLineNo">137</span>        cbsbf.getCachedBlockStatsByFile().entrySet()) {<a name="line.137"></a>
-<span class="sourceLineNo">138</span>      int count = 0;<a name="line.138"></a>
-<span class="sourceLineNo">139</span>      long size = 0;<a name="line.139"></a>
-<span class="sourceLineNo">140</span>      int countData = 0;<a name="line.140"></a>
-<span class="sourceLineNo">141</span>      long sizeData = 0;<a name="line.141"></a>
-<span class="sourceLineNo">142</span>      for (CachedBlock cb: e.getValue()) {<a name="line.142"></a>
-<span class="sourceLineNo">143</span>        count++;<a name="line.143"></a>
-<span class="sourceLineNo">144</span>        size += cb.getSize();<a name="line.144"></a>
-<span class="sourceLineNo">145</span>        BlockType bt = cb.getBlockType();<a name="line.145"></a>
-<span class="sourceLineNo">146</span>        if (bt != null &amp;&amp; bt.isData()) {<a name="line.146"></a>
-<span class="sourceLineNo">147</span>          countData++;<a name="line.147"></a>
-<span class="sourceLineNo">148</span>          sizeData += cb.getSize();<a name="line.148"></a>
-<span class="sourceLineNo">149</span>        }<a name="line.149"></a>
-<span class="sourceLineNo">150</span>      }<a name="line.150"></a>
-<span class="sourceLineNo">151</span>      LOG.info("filename=" + e.getKey() + ", count=" + count + ", countData=" + countData +<a name="line.151"></a>
-<span class="sourceLineNo">152</span>          ", size=" + size + ", sizeData=" + sizeData);<a name="line.152"></a>
-<span class="sourceLineNo">153</span>      LOG.info(BlockCacheUtil.toJSON(e.getKey(), e.getValue()));<a name="line.153"></a>
-<span class="sourceLineNo">154</span>    }<a name="line.154"></a>
-<span class="sourceLineNo">155</span>  }<a name="line.155"></a>
-<span class="sourceLineNo">156</span><a name="line.156"></a>
-<span class="sourceLineNo">157</span>  private BlockCacheUtil.CachedBlocksByFile logPerBlock(final BlockCache bc)<a name="line.157"></a>
-<span class="sourceLineNo">158</span>  throws JsonGenerationException, JsonMappingException, IOException {<a name="line.158"></a>
-<span class="sourceLineNo">159</span>    BlockCacheUtil.CachedBlocksByFile cbsbf = new BlockCacheUtil.CachedBlocksByFile();<a name="line.159"></a>
-<span class="sourceLineNo">160</span>    for (CachedBlock cb: bc) {<a name="line.160"></a>
-<span class="sourceLineNo">161</span>      LOG.info(cb.toString());<a name="line.161"></a>
-<span class="sourceLineNo">162</span>      LOG.info(BlockCacheUtil.toJSON(bc));<a name="line.162"></a>
-<span class="sourceLineNo">163</span>      cbsbf.update(cb);<a name="line.163"></a>
-<span class="sourceLineNo">164</span>    }<a name="line.164"></a>
-<span class="sourceLineNo">165</span>    return cbsbf;<a name="line.165"></a>
-<span class="sourceLineNo">166</span>  }<a name="line.166"></a>
-<span class="sourceLineNo">167</span>}<a name="line.167"></a>
+<span class="sourceLineNo">134</span>  private void logPerFile(final BlockCacheUtil.CachedBlocksByFile cbsbf) throws IOException {<a name="line.134"></a>
+<span class="sourceLineNo">135</span>    for (Map.Entry&lt;String, NavigableSet&lt;CachedBlock&gt;&gt; e:<a name="line.135"></a>
+<span class="sourceLineNo">136</span>        cbsbf.getCachedBlockStatsByFile().entrySet()) {<a name="line.136"></a>
+<span class="sourceLineNo">137</span>      int count = 0;<a name="line.137"></a>
+<span class="sourceLineNo">138</span>      long size = 0;<a name="line.138"></a>
+<span class="sourceLineNo">139</span>      int countData = 0;<a name="line.139"></a>
+<span class="sourceLineNo">140</span>      long sizeData = 0;<a name="line.140"></a>
+<span class="sourceLineNo">141</span>      for (CachedBlock cb: e.getValue()) {<a name="line.141"></a>
+<span class="sourceLineNo">142</span>        count++;<a name="line.142"></a>
+<span class="sourceLineNo">143</span>        size += cb.getSize();<a name="line.143"></a>
+<span class="sourceLineNo">144</span>        BlockType bt = cb.getBlockType();<a name="line.144"></a>
+<span class="sourceLineNo">145</span>        if (bt != null &amp;&amp; bt.isData()) {<a name="line.145"></a>
+<span class="sourceLineNo">146</span>          countData++;<a name="line.146"></a>
+<span class="sourceLineNo">147</span>          sizeData += cb.getSize();<a name="line.147"></a>
+<span class="sourceLineNo">148</span>        }<a name="line.148"></a>
+<span class="sourceLineNo">149</span>      }<a name="line.149"></a>
+<span class="sourceLineNo">150</span>      LOG.info("filename=" + e.getKey() + ", count=" + count + ", countData=" + countData +<a name="line.150"></a>
+<span class="sourceLineNo">151</span>          ", size=" + size + ", sizeData=" + sizeData);<a name="line.151"></a>
+<span class="sourceLineNo">152</span>      LOG.info(BlockCacheUtil.toJSON(e.getKey(), e.getValue()));<a name="line.152"></a>
+<span class="sourceLineNo">153</span>    }<a name="line.153"></a>
+<span class="sourceLineNo">154</span>  }<a name="line.154"></a>
+<span class="sourceLineNo">155</span><a name="line.155"></a>
+<span class="sourceLineNo">156</span>  private BlockCacheUtil.CachedBlocksByFile logPerBlock(final BlockCache bc) throws IOException {<a name="line.156"></a>
+<span class="sourceLineNo">157</span>    BlockCacheUtil.CachedBlocksByFile cbsbf = new BlockCacheUtil.CachedBlocksByFile();<a name="line.157"></a>
+<span class="sourceLineNo">158</span>    for (CachedBlock cb : bc) {<a name="line.158"></a>
+<span class="sourceLineNo">159</span>      LOG.info(cb.toString());<a name="line.159"></a>
+<span class="sourceLineNo">160</span>      LOG.info(BlockCacheUtil.toJSON(bc));<a name="line.160"></a>
+<span class="sourceLineNo">161</span>      cbsbf.update(cb);<a name="line.161"></a>
+<span class="sourceLineNo">162</span>    }<a name="line.162"></a>
+<span class="sourceLineNo">163</span>    return cbsbf;<a name="line.163"></a>
+<span class="sourceLineNo">164</span>  }<a name="line.164"></a>
+<span class="sourceLineNo">165</span>}<a name="line.165"></a>
 
 
 


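Note on the TestBlockCacheReporting hunks above: they drop the Jackson checked exceptions (JsonGenerationException, JsonMappingException) from the helper signatures and add the same explicit instantiateBlockCache() call before each CacheConfig is built. Below is a minimal sketch of the off-heap bucket cache setup used by testBucketCache, with an invented class name and a plain exception in place of the JUnit assertion; reading BUCKET_CACHE_SIZE_KEY as a size in megabytes is an assumption, not something stated in the diff.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HConstants;
    import org.apache.hadoop.hbase.io.hfile.CacheConfig;
    import org.apache.hadoop.hbase.io.hfile.CombinedBlockCache;

    public class BucketCacheSetupSketch {
      // Builds a CacheConfig backed by an off-heap bucket cache, following the
      // configuration keys and call order shown in the testBucketCache hunk.
      public static CacheConfig offheapBucketCacheConfig() {
        Configuration conf = HBaseConfiguration.create();
        conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");
        conf.setInt(HConstants.BUCKET_CACHE_SIZE_KEY, 100); // assumed to mean 100 MB
        CacheConfig.instantiateBlockCache(conf);            // must run before new CacheConfig(conf)
        CacheConfig cc = new CacheConfig(conf);
        // With a bucket cache configured, the block cache is the combined
        // LRU-plus-bucket implementation; the test asserts exactly this.
        if (!(cc.getBlockCache() instanceof CombinedBlockCache)) {
          throw new IllegalStateException("expected CombinedBlockCache");
        }
        return cc;
      }
    }

In the test itself, CacheConfig.clearGlobalInstances() runs in both setUp and tearDown so the global cache instance does not leak across test methods.
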
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.html
index f8e34f2..ea22449 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.html
@@ -217,184 +217,188 @@
 <span class="sourceLineNo">209</span>  @Test<a name="line.209"></a>
 <span class="sourceLineNo">210</span>  public void testDisableCacheDataBlock() throws IOException {<a name="line.210"></a>
 <span class="sourceLineNo">211</span>    Configuration conf = HBaseConfiguration.create();<a name="line.211"></a>
-<span class="sourceLineNo">212</span>    CacheConfig cacheConfig = new CacheConfig(conf);<a name="line.212"></a>
-<span class="sourceLineNo">213</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.213"></a>
-<span class="sourceLineNo">214</span>    assertFalse(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.214"></a>
-<span class="sourceLineNo">215</span>    assertFalse(cacheConfig.shouldCacheDataCompressed());<a name="line.215"></a>
-<span class="sourceLineNo">216</span>    assertFalse(cacheConfig.shouldCacheDataOnWrite());<a name="line.216"></a>
-<span class="sourceLineNo">217</span>    assertTrue(cacheConfig.shouldCacheDataOnRead());<a name="line.217"></a>
-<span class="sourceLineNo">218</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.218"></a>
-<span class="sourceLineNo">219</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.219"></a>
-<span class="sourceLineNo">220</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.220"></a>
-<span class="sourceLineNo">221</span>    assertFalse(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.221"></a>
-<span class="sourceLineNo">222</span>    assertFalse(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.222"></a>
-<span class="sourceLineNo">223</span><a name="line.223"></a>
-<span class="sourceLineNo">224</span>    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, true);<a name="line.224"></a>
-<span class="sourceLineNo">225</span>    conf.setBoolean(CacheConfig.CACHE_DATA_BLOCKS_COMPRESSED_KEY, true);<a name="line.225"></a>
-<span class="sourceLineNo">226</span>    conf.setBoolean(CacheConfig.CACHE_BLOOM_BLOCKS_ON_WRITE_KEY, true);<a name="line.226"></a>
-<span class="sourceLineNo">227</span>    conf.setBoolean(CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY, true);<a name="line.227"></a>
-<span class="sourceLineNo">228</span><a name="line.228"></a>
-<span class="sourceLineNo">229</span>    cacheConfig = new CacheConfig(conf);<a name="line.229"></a>
-<span class="sourceLineNo">230</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.230"></a>
-<span class="sourceLineNo">231</span>    assertTrue(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.231"></a>
-<span class="sourceLineNo">232</span>    assertTrue(cacheConfig.shouldCacheDataCompressed());<a name="line.232"></a>
-<span class="sourceLineNo">233</span>    assertTrue(cacheConfig.shouldCacheDataOnWrite());<a name="line.233"></a>
-<span class="sourceLineNo">234</span>    assertTrue(cacheConfig.shouldCacheDataOnRead());<a name="line.234"></a>
-<span class="sourceLineNo">235</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.235"></a>
-<span class="sourceLineNo">236</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.236"></a>
-<span class="sourceLineNo">237</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.237"></a>
-<span class="sourceLineNo">238</span>    assertTrue(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.238"></a>
-<span class="sourceLineNo">239</span>    assertTrue(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.239"></a>
-<span class="sourceLineNo">240</span><a name="line.240"></a>
-<span class="sourceLineNo">241</span>    conf.setBoolean(CacheConfig.CACHE_DATA_ON_READ_KEY, false);<a name="line.241"></a>
-<span class="sourceLineNo">242</span>    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, false);<a name="line.242"></a>
-<span class="sourceLineNo">243</span><a name="line.243"></a>
-<span class="sourceLineNo">244</span>    cacheConfig = new CacheConfig(conf);<a name="line.244"></a>
-<span class="sourceLineNo">245</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.245"></a>
-<span class="sourceLineNo">246</span>    assertFalse(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.246"></a>
-<span class="sourceLineNo">247</span>    assertFalse(cacheConfig.shouldCacheDataCompressed());<a name="line.247"></a>
-<span class="sourceLineNo">248</span>    assertFalse(cacheConfig.shouldCacheDataOnWrite());<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    assertFalse(cacheConfig.shouldCacheDataOnRead());<a name="line.249"></a>
-<span class="sourceLineNo">250</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.250"></a>
-<span class="sourceLineNo">251</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.251"></a>
-<span class="sourceLineNo">252</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.252"></a>
-<span class="sourceLineNo">253</span>    assertTrue(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    assertTrue(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.254"></a>
-<span class="sourceLineNo">255</span><a name="line.255"></a>
-<span class="sourceLineNo">256</span>    conf.setBoolean(CacheConfig.CACHE_DATA_ON_READ_KEY, true);<a name="line.256"></a>
-<span class="sourceLineNo">257</span>    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, false);<a name="line.257"></a>
-<span class="sourceLineNo">258</span><a name="line.258"></a>
-<span class="sourceLineNo">259</span>    HColumnDescriptor family = new HColumnDescriptor("testDisableCacheDataBlock");<a name="line.259"></a>
-<span class="sourceLineNo">260</span>    family.setBlockCacheEnabled(false);<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>    cacheConfig = new CacheConfig(conf, family);<a name="line.262"></a>
-<span class="sourceLineNo">263</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    assertFalse(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.264"></a>
-<span class="sourceLineNo">265</span>    assertFalse(cacheConfig.shouldCacheDataCompressed());<a name="line.265"></a>
-<span class="sourceLineNo">266</span>    assertFalse(cacheConfig.shouldCacheDataOnWrite());<a name="line.266"></a>
-<span class="sourceLineNo">267</span>    assertFalse(cacheConfig.shouldCacheDataOnRead());<a name="line.267"></a>
-<span class="sourceLineNo">268</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.269"></a>
-<span class="sourceLineNo">270</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.270"></a>
-<span class="sourceLineNo">271</span>    assertTrue(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.271"></a>
-<span class="sourceLineNo">272</span>    assertTrue(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.272"></a>
-<span class="sourceLineNo">273</span>  }<a name="line.273"></a>
-<span class="sourceLineNo">274</span><a name="line.274"></a>
-<span class="sourceLineNo">275</span>  @Test<a name="line.275"></a>
-<span class="sourceLineNo">276</span>  public void testCacheConfigDefaultLRUBlockCache() {<a name="line.276"></a>
-<span class="sourceLineNo">277</span>    CacheConfig cc = new CacheConfig(this.conf);<a name="line.277"></a>
-<span class="sourceLineNo">278</span>    assertTrue(cc.isBlockCacheEnabled());<a name="line.278"></a>
-<span class="sourceLineNo">279</span>    assertTrue(CacheConfig.DEFAULT_IN_MEMORY == cc.isInMemory());<a name="line.279"></a>
-<span class="sourceLineNo">280</span>    basicBlockCacheOps(cc, false, true);<a name="line.280"></a>
-<span class="sourceLineNo">281</span>    assertTrue(cc.getBlockCache() instanceof LruBlockCache);<a name="line.281"></a>
-<span class="sourceLineNo">282</span>  }<a name="line.282"></a>
-<span class="sourceLineNo">283</span><a name="line.283"></a>
-<span class="sourceLineNo">284</span>  /**<a name="line.284"></a>
-<span class="sourceLineNo">285</span>   * Assert that the caches are deployed with CombinedBlockCache and of the appropriate sizes.<a name="line.285"></a>
-<span class="sourceLineNo">286</span>   */<a name="line.286"></a>
-<span class="sourceLineNo">287</span>  @Test<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  public void testOffHeapBucketCacheConfig() {<a name="line.288"></a>
-<span class="sourceLineNo">289</span>    this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");<a name="line.289"></a>
-<span class="sourceLineNo">290</span>    doBucketCacheConfigTest();<a name="line.290"></a>
-<span class="sourceLineNo">291</span>  }<a name="line.291"></a>
-<span class="sourceLineNo">292</span><a name="line.292"></a>
-<span class="sourceLineNo">293</span>  @Test<a name="line.293"></a>
-<span class="sourceLineNo">294</span>  public void testFileBucketCacheConfig() throws IOException {<a name="line.294"></a>
-<span class="sourceLineNo">295</span>    HBaseTestingUtility htu = new HBaseTestingUtility(this.conf);<a name="line.295"></a>
-<span class="sourceLineNo">296</span>    try {<a name="line.296"></a>
-<span class="sourceLineNo">297</span>      Path p = new Path(htu.getDataTestDir(), "bc.txt");<a name="line.297"></a>
-<span class="sourceLineNo">298</span>      FileSystem fs = FileSystem.get(this.conf);<a name="line.298"></a>
-<span class="sourceLineNo">299</span>      fs.create(p).close();<a name="line.299"></a>
-<span class="sourceLineNo">300</span>      this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "file:" + p);<a name="line.300"></a>
-<span class="sourceLineNo">301</span>      doBucketCacheConfigTest();<a name="line.301"></a>
-<span class="sourceLineNo">302</span>    } finally {<a name="line.302"></a>
-<span class="sourceLineNo">303</span>      htu.cleanupTestDir();<a name="line.303"></a>
-<span class="sourceLineNo">304</span>    }<a name="line.304"></a>
-<span class="sourceLineNo">305</span>  }<a name="line.305"></a>
-<span class="sourceLineNo">306</span><a name="line.306"></a>
-<span class="sourceLineNo">307</span>  private void doBucketCacheConfigTest() {<a name="line.307"></a>
-<span class="sourceLineNo">308</span>    final int bcSize = 100;<a name="line.308"></a>
-<span class="sourceLineNo">309</span>    this.conf.setInt(HConstants.BUCKET_CACHE_SIZE_KEY, bcSize);<a name="line.309"></a>
-<span class="sourceLineNo">310</span>    CacheConfig cc = new CacheConfig(this.conf);<a name="line.310"></a>
-<span class="sourceLineNo">311</span>    basicBlockCacheOps(cc, false, false);<a name="line.311"></a>
-<span class="sourceLineNo">312</span>    assertTrue(cc.getBlockCache() instanceof CombinedBlockCache);<a name="line.312"></a>
-<span class="sourceLineNo">313</span>    // TODO: Assert sizes allocated are right and proportions.<a name="line.313"></a>
-<span class="sourceLineNo">314</span>    CombinedBlockCache cbc = (CombinedBlockCache)cc.getBlockCache();<a name="line.314"></a>
-<span class="sourceLineNo">315</span>    BlockCache [] bcs = cbc.getBlockCaches();<a name="line.315"></a>
-<span class="sourceLineNo">316</span>    assertTrue(bcs[0] instanceof LruBlockCache);<a name="line.316"></a>
-<span class="sourceLineNo">317</span>    LruBlockCache lbc = (LruBlockCache)bcs[0];<a name="line.317"></a>
-<span class="sourceLineNo">318</span>    assertEquals(MemorySizeUtil.getOnHeapCacheSize(this.conf), lbc.getMaxSize());<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    assertTrue(bcs[1] instanceof BucketCache);<a name="line.319"></a>
-<span class="sourceLineNo">320</span>    BucketCache bc = (BucketCache)bcs[1];<a name="line.320"></a>
-<span class="sourceLineNo">321</span>    // getMaxSize comes back in bytes but we specified size in MB<a name="line.321"></a>
-<span class="sourceLineNo">322</span>    assertEquals(bcSize, bc.getMaxSize() / (1024 * 1024));<a name="line.322"></a>
-<span class="sourceLineNo">323</span>  }<a name="line.323"></a>
-<span class="sourceLineNo">324</span><a name="line.324"></a>
-<span class="sourceLineNo">325</span>  /**<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   * Assert that when BUCKET_CACHE_COMBINED_KEY is false, the non-default, that we deploy<a name="line.326"></a>
-<span class="sourceLineNo">327</span>   * LruBlockCache as L1 with a BucketCache for L2.<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   */<a name="line.328"></a>
-<span class="sourceLineNo">329</span>  @Test<a name="line.329"></a>
-<span class="sourceLineNo">330</span>  public void testBucketCacheConfigL1L2Setup() {<a name="line.330"></a>
-<span class="sourceLineNo">331</span>    this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");<a name="line.331"></a>
-<span class="sourceLineNo">332</span>    // Make lru size is smaller than bcSize for sure.  Need this to be true so when eviction<a name="line.332"></a>
-<span class="sourceLineNo">333</span>    // from L1 happens, it does not fail because L2 can't take the eviction because block too big.<a name="line.333"></a>
-<span class="sourceLineNo">334</span>    this.conf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.001f);<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    MemoryUsage mu = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage();<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    long lruExpectedSize = MemorySizeUtil.getOnHeapCacheSize(this.conf);<a name="line.336"></a>
-<span class="sourceLineNo">337</span>    final int bcSize = 100;<a name="line.337"></a>
-<span class="sourceLineNo">338</span>    long bcExpectedSize = 100 * 1024 * 1024; // MB.<a name="line.338"></a>
-<span class="sourceLineNo">339</span>    assertTrue(lruExpectedSize &lt; bcExpectedSize);<a name="line.339"></a>
-<span class="sourceLineNo">340</span>    this.conf.setInt(HConstants.BUCKET_CACHE_SIZE_KEY, bcSize);<a name="line.340"></a>
-<span class="sourceLineNo">341</span>    CacheConfig cc = new CacheConfig(this.conf);<a name="line.341"></a>
-<span class="sourceLineNo">342</span>    basicBlockCacheOps(cc, false, false);<a name="line.342"></a>
-<span class="sourceLineNo">343</span>    assertTrue(cc.getBlockCache() instanceof CombinedBlockCache);<a name="line.343"></a>
-<span class="sourceLineNo">344</span>    // TODO: Assert sizes allocated are right and proportions.<a name="line.344"></a>
-<span class="sourceLineNo">345</span>    CombinedBlockCache cbc = (CombinedBlockCache)cc.getBlockCache();<a name="line.345"></a>
-<span class="sourceLineNo">346</span>    LruBlockCache lbc = cbc.onHeapCache;<a name="line.346"></a>
-<span class="sourceLineNo">347</span>    assertEquals(lruExpectedSize, lbc.getMaxSize());<a name="line.347"></a>
-<span class="sourceLineNo">348</span>    BlockCache bc = cbc.l2Cache;<a name="line.348"></a>
-<span class="sourceLineNo">349</span>    // getMaxSize comes back in bytes but we specified size in MB<a name="line.349"></a>
-<span class="sourceLineNo">350</span>    assertEquals(bcExpectedSize, ((BucketCache) bc).getMaxSize());<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    // Test the L1+L2 deploy works as we'd expect with blocks evicted from L1 going to L2.<a name="line.351"></a>
-<span class="sourceLineNo">352</span>    long initialL1BlockCount = lbc.getBlockCount();<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    long initialL2BlockCount = bc.getBlockCount();<a name="line.353"></a>
-<span class="sourceLineNo">354</span>    Cacheable c = new DataCacheEntry();<a name="line.354"></a>
-<span class="sourceLineNo">355</span>    BlockCacheKey bck = new BlockCacheKey("bck", 0);<a name="line.355"></a>
-<span class="sourceLineNo">356</span>    lbc.cacheBlock(bck, c, false);<a name="line.356"></a>
-<span class="sourceLineNo">357</span>    assertEquals(initialL1BlockCount + 1, lbc.getBlockCount());<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    assertEquals(initialL2BlockCount, bc.getBlockCount());<a name="line.358"></a>
-<span class="sourceLineNo">359</span>    // Force evictions by putting in a block too big.<a name="line.359"></a>
-<span class="sourceLineNo">360</span>    final long justTooBigSize = lbc.acceptableSize() + 1;<a name="line.360"></a>
-<span class="sourceLineNo">361</span>    lbc.cacheBlock(new BlockCacheKey("bck2", 0), new DataCacheEntry() {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>      @Override<a name="line.362"></a>
-<span class="sourceLineNo">363</span>      public long heapSize() {<a name="line.363"></a>
-<span class="sourceLineNo">364</span>        return justTooBigSize;<a name="line.364"></a>
-<span class="sourceLineNo">365</span>      }<a name="line.365"></a>
-<span class="sourceLineNo">366</span><a name="line.366"></a>
-<span class="sourceLineNo">367</span>      @Override<a name="line.367"></a>
-<span class="sourceLineNo">368</span>      public int getSerializedLength() {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>        return (int)heapSize();<a name="line.369"></a>
-<span class="sourceLineNo">370</span>      }<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    });<a name="line.371"></a>
-<span class="sourceLineNo">372</span>    // The eviction thread in lrublockcache needs to run.<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    while (initialL1BlockCount != lbc.getBlockCount()) Threads.sleep(10);<a name="line.373"></a>
-<span class="sourceLineNo">374</span>    assertEquals(initialL1BlockCount, lbc.getBlockCount());<a name="line.374"></a>
-<span class="sourceLineNo">375</span>  }<a name="line.375"></a>
-<span class="sourceLineNo">376</span><a name="line.376"></a>
-<span class="sourceLineNo">377</span>  @Test<a name="line.377"></a>
-<span class="sourceLineNo">378</span>  public void testL2CacheWithInvalidBucketSize() {<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    Configuration c = new Configuration(this.conf);<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    c.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    c.set(CacheConfig.BUCKET_CACHE_BUCKETS_KEY, "256,512,1024,2048,4000,4096");<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    c.setFloat(HConstants.BUCKET_CACHE_SIZE_KEY, 1024);<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    try {<a name="line.383"></a>
-<span class="sourceLineNo">384</span>      CacheConfig.getBucketCache(c);<a name="line.384"></a>
-<span class="sourceLineNo">385</span>      fail("Should throw IllegalArgumentException when passing illegal value for bucket size");<a name="line.385"></a>
-<span class="sourceLineNo">386</span>    } catch (IllegalArgumentException e) {<a name="line.386"></a>
-<span class="sourceLineNo">387</span>    }<a name="line.387"></a>
-<span class="sourceLineNo">388</span>  }<a name="line.388"></a>
-<span class="sourceLineNo">389</span>}<a name="line.389"></a>
+<span class="sourceLineNo">212</span>    CacheConfig.instantiateBlockCache(conf);<a name="line.212"></a>
+<span class="sourceLineNo">213</span>    CacheConfig cacheConfig = new CacheConfig(conf);<a name="line.213"></a>
+<span class="sourceLineNo">214</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.214"></a>
+<span class="sourceLineNo">215</span>    assertFalse(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.215"></a>
+<span class="sourceLineNo">216</span>    assertFalse(cacheConfig.shouldCacheDataCompressed());<a name="line.216"></a>
+<span class="sourceLineNo">217</span>    assertFalse(cacheConfig.shouldCacheDataOnWrite());<a name="line.217"></a>
+<span class="sourceLineNo">218</span>    assertTrue(cacheConfig.shouldCacheDataOnRead());<a name="line.218"></a>
+<span class="sourceLineNo">219</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.219"></a>
+<span class="sourceLineNo">220</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.220"></a>
+<span class="sourceLineNo">221</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.221"></a>
+<span class="sourceLineNo">222</span>    assertFalse(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.222"></a>
+<span class="sourceLineNo">223</span>    assertFalse(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.223"></a>
+<span class="sourceLineNo">224</span><a name="line.224"></a>
+<span class="sourceLineNo">225</span>    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, true);<a name="line.225"></a>
+<span class="sourceLineNo">226</span>    conf.setBoolean(CacheConfig.CACHE_DATA_BLOCKS_COMPRESSED_KEY, true);<a name="line.226"></a>
+<span class="sourceLineNo">227</span>    conf.setBoolean(CacheConfig.CACHE_BLOOM_BLOCKS_ON_WRITE_KEY, true);<a name="line.227"></a>
+<span class="sourceLineNo">228</span>    conf.setBoolean(CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY, true);<a name="line.228"></a>
+<span class="sourceLineNo">229</span><a name="line.229"></a>
+<span class="sourceLineNo">230</span>    cacheConfig = new CacheConfig(conf);<a name="line.230"></a>
+<span class="sourceLineNo">231</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.231"></a>
+<span class="sourceLineNo">232</span>    assertTrue(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.232"></a>
+<span class="sourceLineNo">233</span>    assertTrue(cacheConfig.shouldCacheDataCompressed());<a name="line.233"></a>
+<span class="sourceLineNo">234</span>    assertTrue(cacheConfig.shouldCacheDataOnWrite());<a name="line.234"></a>
+<span class="sourceLineNo">235</span>    assertTrue(cacheConfig.shouldCacheDataOnRead());<a name="line.235"></a>
+<span class="sourceLineNo">236</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.236"></a>
+<span class="sourceLineNo">237</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.237"></a>
+<span class="sourceLineNo">238</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.238"></a>
+<span class="sourceLineNo">239</span>    assertTrue(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.239"></a>
+<span class="sourceLineNo">240</span>    assertTrue(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.240"></a>
+<span class="sourceLineNo">241</span><a name="line.241"></a>
+<span class="sourceLineNo">242</span>    conf.setBoolean(CacheConfig.CACHE_DATA_ON_READ_KEY, false);<a name="line.242"></a>
+<span class="sourceLineNo">243</span>    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, false);<a name="line.243"></a>
+<span class="sourceLineNo">244</span><a name="line.244"></a>
+<span class="sourceLineNo">245</span>    cacheConfig = new CacheConfig(conf);<a name="line.245"></a>
+<span class="sourceLineNo">246</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.246"></a>
+<span class="sourceLineNo">247</span>    assertFalse(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.247"></a>
+<span class="sourceLineNo">248</span>    assertFalse(cacheConfig.shouldCacheDataCompressed());<a name="line.248"></a>
+<span class="sourceLineNo">249</span>    assertFalse(cacheConfig.shouldCacheDataOnWrite());<a name="line.249"></a>
+<span class="sourceLineNo">250</span>    assertFalse(cacheConfig.shouldCacheDataOnRead());<a name="line.250"></a>
+<span class="sourceLineNo">251</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.251"></a>
+<span class="sourceLineNo">252</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.252"></a>
+<span class="sourceLineNo">253</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.253"></a>
+<span class="sourceLineNo">254</span>    assertTrue(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.254"></a>
+<span class="sourceLineNo">255</span>    assertTrue(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.255"></a>
+<span class="sourceLineNo">256</span><a name="line.256"></a>
+<span class="sourceLineNo">257</span>    conf.setBoolean(CacheConfig.CACHE_DATA_ON_READ_KEY, true);<a name="line.257"></a>
+<span class="sourceLineNo">258</span>    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, false);<a name="line.258"></a>
+<span class="sourceLineNo">259</span><a name="line.259"></a>
+<span class="sourceLineNo">260</span>    HColumnDescriptor family = new HColumnDescriptor("testDisableCacheDataBlock");<a name="line.260"></a>
+<span class="sourceLineNo">261</span>    family.setBlockCacheEnabled(false);<a name="line.261"></a>
+<span class="sourceLineNo">262</span><a name="line.262"></a>
+<span class="sourceLineNo">263</span>    cacheConfig = new CacheConfig(conf, family);<a name="line.263"></a>
+<span class="sourceLineNo">264</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.264"></a>
+<span class="sourceLineNo">265</span>    assertFalse(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.265"></a>
+<span class="sourceLineNo">266</span>    assertFalse(cacheConfig.shouldCacheDataCompressed());<a name="line.266"></a>
+<span class="sourceLineNo">267</span>    assertFalse(cacheConfig.shouldCacheDataOnWrite());<a name="line.267"></a>
+<span class="sourceLineNo">268</span>    assertFalse(cacheConfig.shouldCacheDataOnRead());<a name="line.268"></a>
+<span class="sourceLineNo">269</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.269"></a>
+<span class="sourceLineNo">270</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.270"></a>
+<span class="sourceLineNo">271</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.271"></a>
+<span class="sourceLineNo">272</span>    assertTrue(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.272"></a>
+<span class="sourceLineNo">273</span>    assertTrue(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.273"></a>
+<span class="sourceLineNo">274</span>  }<a name="line.274"></a>
+<span class="sourceLineNo">275</span><a name="line.275"></a>
+<span class="sourceLineNo">276</span>  @Test<a name="line.276"></a>
+<span class="sourceLineNo">277</span>  public void testCacheConfigDefaultLRUBlockCache() {<a name="line.277"></a>
+<span class="sourceLineNo">278</span>    CacheConfig.instantiateBlockCache(this.conf);<a name="line.278"></a>
+<span class="sourceLineNo">279</span>    CacheConfig cc = new CacheConfig(this.conf);<a name="line.279"></a>
+<span class="sourceLineNo">280</span>    assertTrue(cc.isBlockCacheEnabled());<a name="line.280"></a>
+<span class="sourceLineNo">281</span>    assertTrue(CacheConfig.DEFAULT_IN_MEMORY == cc.isInMemory());<a name="line.281"></a>
+<span class="sourceLineNo">282</span>    basicBlockCacheOps(cc, false, true);<a name="line.282"></a>
+<span class="sourceLineNo">283</span>    assertTrue(cc.getBlockCache() instanceof LruBlockCache);<a name="line.283"></a>
+<span class="sourceLineNo">284</span>  }<a name="line.284"></a>
+<span class="sourceLineNo">285</span><a name="line.285"></a>
+<span class="sourceLineNo">286</span>  /**<a name="line.286"></a>
+<span class="sourceLineNo">287</span>   * Assert that the caches are deployed with CombinedBlockCache and of the appropriate sizes.<a name="line.287"></a>
+<span class="sourceLineNo">288</span>   */<a name="line.288"></a>
+<span class="sourceLineNo">289</span>  @Test<a name="line.289"></a>
+<span class="sourceLineNo">290</span>  public void testOffHeapBucketCacheConfig() {<a name="line.290"></a>
+<span class="sourceLineNo">291</span>    this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");<a name="line.291"></a>
+<span class="sourceLineNo">292</span>    doBucketCacheConfigTest();<a name="line.292"></a>
+<span class="sourceLineNo">293</span>  }<a name="line.293"></a>
+<span class="sourceLineNo">294</span><a name="line.294"></a>
+<span class="sourceLineNo">295</span>  @Test<a name="line.295"></a>
+<span class="sourceLineNo">296</span>  public void testFileBucketCacheConfig() throws IOException {<a name="line.296"></a>
+<span class="sourceLineNo">297</span>    HBaseTestingUtility htu = new HBaseTestingUtility(this.conf);<a name="line.297"></a>
+<span class="sourceLineNo">298</span>    try {<a name="line.298"></a>
+<span class="sourceLineNo">299</span>      Path p = new Path(htu.getDataTestDir(), "bc.txt");<a name="line.299"></a>
+<span class="sourceLineNo">300</span>      FileSystem fs = FileSystem.get(this.conf);<a name="line.300"></a>
+<span class="sourceLineNo">301</span>      fs.create(p).close();<a name="line.301"></a>
+<span class="sourceLineNo">302</span>      this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "file:" + p);<a name="line.302"></a>
+<span class="sourceLineNo">303</span>      doBucketCacheConfigTest();<a name="line.303"></a>
+<span class="sourceLineNo">304</span>    } finally {<a name="line.304"></a>
+<span class="sourceLineNo">305</span>      htu.cleanupTestDir();<a name="line.305"></a>
+<span class="sourceLineNo">306</span>    }<a name="line.306"></a>
+<span class="sourceLineNo">307</span>  }<a name="line.307"></a>
+<span class="sourceLineNo">308</span><a name="line.308"></a>
+<span class="sourceLineNo">309</span>  private void doBucketCacheConfigTest() {<a name="line.309"></a>
+<span class="sourceLineNo">310</span>    final int bcSize = 100;<a name="line.310"></a>
+<span class="sourceLineNo">311</span>    this.conf.setInt(HConstants.BUCKET_CACHE_SIZE_KEY, bcSize);<a name="line.311"></a>
+<span class="sourceLineNo">312</span>    CacheConfig.instantiateBlockCache(this.conf);<a name="line.312"></a>
+<span class="sourceLineNo">313</span>    CacheConfig cc = new CacheConfig(this.conf);<a name="line.313"></a>
+<span class="sourceLineNo">314</span>    basicBlockCacheOps(cc, false, false);<a name="line.314"></a>
+<span class="sourceLineNo">315</span>    assertTrue(cc.getBlockCache() instanceof CombinedBlockCache);<a name="line.315"></a>
+<span class="sourceLineNo">316</span>    // TODO: Assert sizes allocated are right and proportions.<a name="line.316"></a>
+<span class="sourceLineNo">317</span>    CombinedBlockCache cbc = (CombinedBlockCache)cc.getBlockCache();<a name="line.317"></a>
+<span class="sourceLineNo">318</span>    BlockCache [] bcs = cbc.getBlockCaches();<a name="line.318"></a>
+<span class="sourceLineNo">319</span>    assertTrue(bcs[0] instanceof LruBlockCache);<a name="line.319"></a>
+<span class="sourceLineNo">320</span>    LruBlockCache lbc = (LruBlockCache)bcs[0];<a name="line.320"></a>
+<span class="sourceLineNo">321</span>    assertEquals(MemorySizeUtil.getOnHeapCacheSize(this.conf), lbc.getMaxSize());<a name="line.321"></a>
+<span class="sourceLineNo">322</span>    assertTrue(bcs[1] instanceof BucketCache);<a name="line.322"></a>
+<span class="sourceLineNo">323</span>    BucketCache bc = (BucketCache)bcs[1];<a name="line.323"></a>
+<span class="sourceLineNo">324</span>    // getMaxSize comes back in bytes but we specified size in MB<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    assertEquals(bcSize, bc.getMaxSize() / (1024 * 1024));<a name="line.325"></a>
+<span class="sourceLineNo">326</span>  }<a name="line.326"></a>
+<span class="sourceLineNo">327</span><a name="line.327"></a>
+<span class="sourceLineNo">328</span>  /**<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * Assert that when BUCKET_CACHE_COMBINED_KEY is false, the non-default, that we deploy<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   * LruBlockCache as L1 with a BucketCache for L2.<a name="line.330"></a>
+<span class="sourceLineNo">331</span>   */<a name="line.331"></a>
+<span class="sourceLineNo">332</span>  @Test<a name="line.332"></a>
+<span class="sourceLineNo">333</span>  public void testBucketCacheConfigL1L2Setup() {<a name="line.333"></a>
+<span class="sourceLineNo">334</span>    this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");<a name="line.334"></a>
+<span class="sourceLineNo">335</span>    // Make lru size is smaller than bcSize for sure.  Need this to be true so when eviction<a name="line.335"></a>
+<span class="sourceLineNo">336</span>    // from L1 happens, it does not fail because L2 can't take the eviction because block too big.<a name="line.336"></a>
+<span class="sourceLineNo">337</span>    this.conf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.001f);<a name="line.337"></a>
+<span class="sourceLineNo">338</span>    MemoryUsage mu = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage();<a name="line.338"></a>
+<span class="sourceLineNo">339</span>    long lruExpectedSize = MemorySizeUtil.getOnHeapCacheSize(this.conf);<a name="line.339"></a>
+<span class="sourceLineNo">340</span>    final int bcSize = 100;<a name="line.340"></a>
+<span class="sourceLineNo">341</span>    long bcExpectedSize = 100 * 1024 * 1024; // MB.<a name="line.341"></a>
+<span class="sourceLineNo">342</span>    assertTrue(lruExpectedSize &lt; bcExpectedSize);<a name="line.342"></a>
+<span class="sourceLineNo">343</span>    this.conf.setInt(HConstants.BUCKET_CACHE_SIZE_KEY, bcSize);<a name="line.343"></a>
+<span class="sourceLineNo">344</span>    CacheConfig.instantiateBlockCache(this.conf);<a name="line.344"></a>
+<span class="sourceLineNo">345</span>    CacheConfig cc = new CacheConfig(this.conf);<a name="line.345"></a>
+<span class="sourceLineNo">346</span>    basicBlockCacheOps(cc, false, false);<a name="line.346"></a>
+<span class="sourceLineNo">347</span>    assertTrue(cc.getBlockCache() instanceof CombinedBlockCache);<a name="line.347"></a>
+<span class="sourceLineNo">348</span>    // TODO: Assert sizes allocated are right and proportions.<a name="line.348"></a>
+<span class="sourceLineNo">349</span>    CombinedBlockCache cbc = (CombinedBlockCache)cc.getBlockCache();<a name="line.349"></a>
+<span class="sourceLineNo">350</span>    LruBlockCache lbc = cbc.onHeapCache;<a name="line.350"></a>
+<span class="sourceLineNo">351</span>    assertEquals(lruExpectedSize, lbc.getMaxSize());<a name="line.351"></a>
+<span class="sourceLineNo">352</span>    BlockCache bc = cbc.l2Cache;<a name="line.352"></a>
+<span class="sourceLineNo">353</span>    // getMaxSize comes back in bytes but we specified size in MB<a name="line.353"></a>
+<span class="sourceLineNo">354</span>    assertEquals(bcExpectedSize, ((BucketCache) bc).getMaxSize());<a name="line.354"></a>
+<span class="sourceLineNo">355</span>    // Test the L1+L2 deploy works as we'd expect with blocks evicted from L1 going to L2.<a name="line.355"></a>
+<span class="sourceLineNo">356</span>    long initialL1BlockCount = lbc.getBlockCount();<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    long initialL2BlockCount = bc.getBlockCount();<a name="line.357"></a>
+<span class="sourceLineNo">358</span>    Cacheable c = new DataCacheEntry();<a name="line.358"></a>
+<span class="sourceLineNo">359</span>    BlockCacheKey bck = new BlockCacheKey("bck", 0);<a name="line.359"></a>
+<span class="sourceLineNo">360</span>    lbc.cacheBlock(bck, c, false);<a name="line.360"></a>
+<span class="sourceLineNo">361</span>    assertEquals(initialL1BlockCount + 1, lbc.getBlockCount());<a name="line.361"></a>
+<span class="sourceLineNo">362</span>    assertEquals(initialL2BlockCount, bc.getBlockCount());<a name="line.362"></a>
+<span class="sourceLineNo">363</span>    // Force evictions by putting in a block too big.<a name="line.363"></a>
+<span class="sourceLineNo">364</span>    final long justTooBigSize = lbc.acceptableSize() + 1;<a name="line.364"></a>
+<span class="sourceLineNo">365</span>    lbc.cacheBlock(new BlockCacheKey("bck2", 0), new DataCacheEntry() {<a name="line.365"></a>
+<span class="sourceLineNo">366</span>      @Override<a name="line.366"></a>
+<span class="sourceLineNo">367</span>      public long heapSize() {<a name="line.367"></a>
+<span class="sourceLineNo">368</span>        return justTooBigSize;<a name="line.368"></a>
+<span class="sourceLineNo">369</span>      }<a name="line.369"></a>
+<span class="sourceLineNo">370</span><a name="line.370"></a>
+<span class="sourceLineNo">371</span>      @Override<a name="line.371"></a>
+<span class="sourceLineNo">372</span>      public int getSerializedLength() {<a name="line.372"></a>
+<span class="sourceLineNo">373</span>        return (int)heapSize();<a name="line.373"></a>
+<span class="sourceLineNo">374</span>      }<a name="line.374"></a>
+<span class="sourceLineNo">375</span>    });<a name="line.375"></a>
+<span class="sourceLineNo">376</span>    // The eviction thread in lrublockcache needs to run.<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    while (initialL1BlockCount != lbc.getBlockCount()) Threads.sleep(10);<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    assertEquals(initialL1BlockCount, lbc.getBlockCount());<a name="line.378"></a>
+<span class="sourceLineNo">379</span>  }<a name="line.379"></a>
+<span class="sourceLineNo">380</span><a name="line.380"></a>
+<span class="sourceLineNo">381</span>  @Test<a name="line.381"></a>
+<span class="sourceLineNo">382</span>  public void testL2CacheWithInvalidBucketSize() {<a name="line.382"></a>
+<span class="sourceLineNo">383</span>    Configuration c = new Configuration(this.conf);<a name="line.383"></a>
+<span class="sourceLineNo">384</span>    c.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");<a name="line.384"></a>
+<span class="sourceLineNo">385</span>    c.set(CacheConfig.BUCKET_CACHE_BUCKETS_KEY, "256,512,1024,2048,4000,4096");<a name="line.385"></a>
+<span class="sourceLineNo">386</span>    c.setFloat(HConstants.BUCKET_CACHE_SIZE_KEY, 1024);<a name="line.386"></a>
+<span class="sourceLineNo">387</span>    try {<a name="line.387"></a>
+<span class="sourceLineNo">388</span>      CacheConfig.getBucketCache(c);<a name="line.388"></a>
+<span class="sourceLineNo">389</span>      fail("Should throw IllegalArgumentException when passing illegal value for bucket size");<a name="line.389"></a>
+<span class="sourceLineNo">390</span>    } catch (IllegalArgumentException e) {<a name="line.390"></a>
+<span class="sourceLineNo">391</span>    }<a name="line.391"></a>
+<span class="sourceLineNo">392</span>  }<a name="line.392"></a>
+<span class="sourceLineNo">393</span>}<a name="line.393"></a>
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.CacheOnWriteType.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.CacheOnWriteType.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.CacheOnWriteType.html
index 4cd4f1b..21a78bc 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.CacheOnWriteType.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.CacheOnWriteType.html
@@ -168,75 +168,75 @@
 <span class="sourceLineNo">160</span>    Configuration conf = TEST_UTIL.getConfiguration();<a name="line.160"></a>
 <span class="sourceLineNo">161</span>    List&lt;BlockCache&gt; blockcaches = new ArrayList&lt;&gt;();<a name="line.161"></a>
 <span class="sourceLineNo">162</span>    // default<a name="line.162"></a>
-<span class="sourceLineNo">163</span>    blockcaches.add(new CacheConfig(conf).getBlockCache());<a name="line.163"></a>
-<span class="sourceLineNo">164</span><a name="line.164"></a>
-<span class="sourceLineNo">165</span>    //set LruBlockCache.LRU_HARD_CAPACITY_LIMIT_FACTOR_CONFIG_NAME to 2.0f due to HBASE-16287<a name="line.165"></a>
-<span class="sourceLineNo">166</span>    TEST_UTIL.getConfiguration().setFloat(LruBlockCache.LRU_HARD_CAPACITY_LIMIT_FACTOR_CONFIG_NAME, 2.0f);<a name="line.166"></a>
-<span class="sourceLineNo">167</span>    // memory<a name="line.167"></a>
-<span class="sourceLineNo">168</span>    BlockCache lru = new LruBlockCache(128 * 1024 * 1024, 64 * 1024, TEST_UTIL.getConfiguration());<a name="line.168"></a>
-<span class="sourceLineNo">169</span>    blockcaches.add(lru);<a name="line.169"></a>
-<span class="sourceLineNo">170</span><a name="line.170"></a>
-<span class="sourceLineNo">171</span>    // bucket cache<a name="line.171"></a>
-<span class="sourceLineNo">172</span>    FileSystem.get(conf).mkdirs(TEST_UTIL.getDataTestDir());<a name="line.172"></a>
-<span class="sourceLineNo">173</span>    int[] bucketSizes =<a name="line.173"></a>
-<span class="sourceLineNo">174</span>        { INDEX_BLOCK_SIZE, DATA_BLOCK_SIZE, BLOOM_BLOCK_SIZE, 64 * 1024, 128 * 1024 };<a name="line.174"></a>
-<span class="sourceLineNo">175</span>    BlockCache bucketcache =<a name="line.175"></a>
-<span class="sourceLineNo">176</span>        new BucketCache("offheap", 128 * 1024 * 1024, 64 * 1024, bucketSizes, 5, 64 * 100, null);<a name="line.176"></a>
-<span class="sourceLineNo">177</span>    blockcaches.add(bucketcache);<a name="line.177"></a>
-<span class="sourceLineNo">178</span>    return blockcaches;<a name="line.178"></a>
-<span class="sourceLineNo">179</span>  }<a name="line.179"></a>
-<span class="sourceLineNo">180</span><a name="line.180"></a>
-<span class="sourceLineNo">181</span>  @Parameters<a name="line.181"></a>
-<span class="sourceLineNo">182</span>  public static Collection&lt;Object[]&gt; getParameters() throws IOException {<a name="line.182"></a>
-<span class="sourceLineNo">183</span>    List&lt;Object[]&gt; params = new ArrayList&lt;&gt;();<a name="line.183"></a>
-<span class="sourceLineNo">184</span>    for (BlockCache blockCache : getBlockCaches()) {<a name="line.184"></a>
-<span class="sourceLineNo">185</span>      for (CacheOnWriteType cowType : CacheOnWriteType.values()) {<a name="line.185"></a>
-<span class="sourceLineNo">186</span>        for (Compression.Algorithm compress : HBaseCommonTestingUtility.COMPRESSION_ALGORITHMS) {<a name="line.186"></a>
-<span class="sourceLineNo">187</span>          for (boolean cacheCompressedData : new boolean[] { false, true }) {<a name="line.187"></a>
-<span class="sourceLineNo">188</span>            params.add(new Object[] { cowType, compress, cacheCompressedData, blockCache });<a name="line.188"></a>
-<span class="sourceLineNo">189</span>          }<a name="line.189"></a>
-<span class="sourceLineNo">190</span>        }<a name="line.190"></a>
-<span class="sourceLineNo">191</span>      }<a name="line.191"></a>
-<span class="sourceLineNo">192</span>    }<a name="line.192"></a>
-<span class="sourceLineNo">193</span>    return params;<a name="line.193"></a>
-<span class="sourceLineNo">194</span>  }<a name="line.194"></a>
-<span class="sourceLineNo">195</span><a name="line.195"></a>
-<span class="sourceLineNo">196</span>  private void clearBlockCache(BlockCache blockCache) throws InterruptedException {<a name="line.196"></a>
-<span class="sourceLineNo">197</span>    if (blockCache instanceof LruBlockCache) {<a name="line.197"></a>
-<span class="sourceLineNo">198</span>      ((LruBlockCache) blockCache).clearCache();<a name="line.198"></a>
-<span class="sourceLineNo">199</span>    } else {<a name="line.199"></a>
-<span class="sourceLineNo">200</span>      // BucketCache may not return all cached blocks(blocks in write queue), so check it here.<a name="line.200"></a>
-<span class="sourceLineNo">201</span>      for (int clearCount = 0; blockCache.getBlockCount() &gt; 0; clearCount++) {<a name="line.201"></a>
-<span class="sourceLineNo">202</span>        if (clearCount &gt; 0) {<a name="line.202"></a>
-<span class="sourceLineNo">203</span>          LOG.warn("clear block cache " + blockCache + " " + clearCount + " times, "<a name="line.203"></a>
-<span class="sourceLineNo">204</span>              + blockCache.getBlockCount() + " blocks remaining");<a name="line.204"></a>
-<span class="sourceLineNo">205</span>          Thread.sleep(10);<a name="line.205"></a>
-<span class="sourceLineNo">206</span>        }<a name="line.206"></a>
-<span class="sourceLineNo">207</span>        for (CachedBlock block : Lists.newArrayList(blockCache)) {<a name="line.207"></a>
-<span class="sourceLineNo">208</span>          BlockCacheKey key = new BlockCacheKey(block.getFilename(), block.getOffset());<a name="line.208"></a>
-<span class="sourceLineNo">209</span>          // CombinedBucketCache may need evict two times.<a name="line.209"></a>
-<span class="sourceLineNo">210</span>          for (int evictCount = 0; blockCache.evictBlock(key); evictCount++) {<a name="line.210"></a>
-<span class="sourceLineNo">211</span>            if (evictCount &gt; 1) {<a name="line.211"></a>
-<span class="sourceLineNo">212</span>              LOG.warn("evict block " + block + " in " + blockCache + " " + evictCount<a name="line.212"></a>
-<span class="sourceLineNo">213</span>                  + " times, maybe a bug here");<a name="line.213"></a>
-<span class="sourceLineNo">214</span>            }<a name="line.214"></a>
-<span class="sourceLineNo">215</span>          }<a name="line.215"></a>
-<span class="sourceLineNo">216</span>        }<a name="line.216"></a>
-<span class="sourceLineNo">217</span>      }<a name="line.217"></a>
-<span class="sourceLineNo">218</span>    }<a name="line.218"></a>
-<span class="sourceLineNo">219</span>  }<a name="line.219"></a>
-<span class="sourceLineNo">220</span><a name="line.220"></a>
-<span class="sourceLineNo">221</span>  @Before<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  public void setUp() throws IOException {<a name="line.222"></a>
-<span class="sourceLineNo">223</span>    conf = TEST_UTIL.getConfiguration();<a name="line.223"></a>
-<span class="sourceLineNo">224</span>    this.conf.set("dfs.datanode.data.dir.perm", "700");<a name="line.224"></a>
-<span class="sourceLineNo">225</span>    conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, INDEX_BLOCK_SIZE);<a name="line.225"></a>
-<span class="sourceLineNo">226</span>    conf.setInt(BloomFilterFactory.IO_STOREFILE_BLOOM_BLOCK_SIZE,<a name="line.226"></a>
-<span class="sourceLineNo">227</span>        BLOOM_BLOCK_SIZE);<a name="line.227"></a>
-<span class="sourceLineNo">228</span>    conf.setBoolean(CacheConfig.CACHE_DATA_BLOCKS_COMPRESSED_KEY, cacheCompressedData);<a name="line.228"></a>
-<span class="sourceLineNo">229</span>    cowType.modifyConf(conf);<a name="line.229"></a>
-<span class="sourceLineNo">230</span>    fs = HFileSystem.get(conf);<a name="line.230"></a>
-<span class="sourceLineNo">231</span>    CacheConfig.GLOBAL_BLOCK_CACHE_INSTANCE = blockCache;<a name="line.231"></a>
+<span class="sourceLineNo">163</span>    CacheConfig.instantiateBlockCache(conf);<a name="line.163"></a>
+<span class="sourceLineNo">164</span>    blockcaches.add(new CacheConfig(conf).getBlockCache());<a name="line.164"></a>
+<span class="sourceLineNo">165</span><a name="line.165"></a>
+<span class="sourceLineNo">166</span>    //set LruBlockCache.LRU_HARD_CAPACITY_LIMIT_FACTOR_CONFIG_NAME to 2.0f due to HBASE-16287<a name="line.166"></a>
+<span class="sourceLineNo">167</span>    TEST_UTIL.getConfiguration().setFloat(LruBlockCache.LRU_HARD_CAPACITY_LIMIT_FACTOR_CONFIG_NAME, 2.0f);<a name="line.167"></a>
+<span class="sourceLineNo">168</span>    // memory<a name="line.168"></a>
+<span class="sourceLineNo">169</span>    BlockCache lru = new LruBlockCache(128 * 1024 * 1024, 64 * 1024, TEST_UTIL.getConfiguration());<a name="line.169"></a>
+<span class="sourceLineNo">170</span>    blockcaches.add(lru);<a name="line.170"></a>
+<span class="sourceLineNo">171</span><a name="line.171"></a>
+<span class="sourceLineNo">172</span>    // bucket cache<a name="line.172"></a>
+<span class="sourceLineNo">173</span>    FileSystem.get(conf).mkdirs(TEST_UTIL.getDataTestDir());<a name="line.173"></a>
+<span class="sourceLineNo">174</span>    int[] bucketSizes =<a name="line.174"></a>
+<span class="sourceLineNo">175</span>        { INDEX_BLOCK_SIZE, DATA_BLOCK_SIZE, BLOOM_BLOCK_SIZE, 64 * 1024, 128 * 1024 };<a name="line.175"></a>
+<span class="sourceLineNo">176</span>    BlockCache bucketcache =<a name="line.176"></a>
+<span class="sourceLineNo">177</span>        new BucketCache("offheap", 128 * 1024 * 1024, 64 * 1024, bucketSizes, 5, 64 * 100, null);<a name="line.177"></a>
+<span class="sourceLineNo">178</span>    blockcaches.add(bucketcache);<a name="line.178"></a>
+<span class="sourceLineNo">179</span>    return blockcaches;<a name="line.179"></a>
+<span class="sourceLineNo">180</span>  }<a name="line.180"></a>
+<span class="sourceLineNo">181</span><a name="line.181"></a>
+<span class="sourceLineNo">182</span>  @Parameters<a name="line.182"></a>
+<span class="sourceLineNo">183</span>  public static Collection&lt;Object[]&gt; getParameters() throws IOException {<a name="line.183"></a>
+<span class="sourceLineNo">184</span>    List&lt;Object[]&gt; params = new ArrayList&lt;&gt;();<a name="line.184"></a>
+<span class="sourceLineNo">185</span>    for (BlockCache blockCache : getBlockCaches()) {<a name="line.185"></a>
+<span class="sourceLineNo">186</span>      for (CacheOnWriteType cowType : CacheOnWriteType.values()) {<a name="line.186"></a>
+<span class="sourceLineNo">187</span>        for (Compression.Algorithm compress : HBaseCommonTestingUtility.COMPRESSION_ALGORITHMS) {<a name="line.187"></a>
+<span class="sourceLineNo">188</span>          for (boolean cacheCompressedData : new boolean[] { false, true }) {<a name="line.188"></a>
+<span class="sourceLineNo">189</span>            params.add(new Object[] { cowType, compress, cacheCompressedData, blockCache });<a name="line.189"></a>
+<span class="sourceLineNo">190</span>          }<a name="line.190"></a>
+<span class="sourceLineNo">191</span>        }<a name="line.191"></a>
+<span class="sourceLineNo">192</span>      }<a name="line.192"></a>
+<span class="sourceLineNo">193</span>    }<a name="line.193"></a>
+<span class="sourceLineNo">194</span>    return params;<a name="line.194"></a>
+<span class="sourceLineNo">195</span>  }<a name="line.195"></a>
+<span class="sourceLineNo">196</span><a name="line.196"></a>
+<span class="sourceLineNo">197</span>  private void clearBlockCache(BlockCache blockCache) throws InterruptedException {<a name="line.197"></a>
+<span class="sourceLineNo">198</span>    if (blockCache instanceof LruBlockCache) {<a name="line.198"></a>
+<span class="sourceLineNo">199</span>      ((LruBlockCache) blockCache).clearCache();<a name="line.199"></a>
+<span class="sourceLineNo">200</span>    } else {<a name="line.200"></a>
+<span class="sourceLineNo">201</span>      // BucketCache may not return all cached blocks(blocks in write queue), so check it here.<a name="line.201"></a>
+<span class="sourceLineNo">202</span>      for (int clearCount = 0; blockCache.getBlockCount() &gt; 0; clearCount++) {<a name="line.202"></a>
+<span class="sourceLineNo">203</span>        if (clearCount &gt; 0) {<a name="line.203"></a>
+<span class="sourceLineNo">204</span>          LOG.warn("clear block cache " + blockCache + " " + clearCount + " times, "<a name="line.204"></a>
+<span class="sourceLineNo">205</span>              + blockCache.getBlockCount() + " blocks remaining");<a name="line.205"></a>
+<span class="sourceLineNo">206</span>          Thread.sleep(10);<a name="line.206"></a>
+<span class="sourceLineNo">207</span>        }<a name="line.207"></a>
+<span class="sourceLineNo">208</span>        for (CachedBlock block : Lists.newArrayList(blockCache)) {<a name="line.208"></a>
+<span class="sourceLineNo">209</span>          BlockCacheKey key = new BlockCacheKey(block.getFilename(), block.getOffset());<a name="line.209"></a>
+<span class="sourceLineNo">210</span>          // CombinedBucketCache may need evict two times.<a name="line.210"></a>
+<span class="sourceLineNo">211</span>          for (int evictCount = 0; blockCache.evictBlock(key); evictCount++) {<a name="line.211"></a>
+<span class="sourceLineNo">212</span>            if (evictCount &gt; 1) {<a name="line.212"></a>
+<span class="sourceLineNo">213</span>              LOG.warn("evict block " + block + " in " + blockCache + " " + evictCount<a name="line.213"></a>
+<span class="sourceLineNo">214</span>                  + " times, maybe a bug here");<a name="line.214"></a>
+<span class="sourceLineNo">215</span>            }<a name="line.215"></a>
+<span class="sourceLineNo">216</span>          }<a name="line.216"></a>
+<span class="sourceLineNo">217</span>        }<a name="line.217"></a>
+<span class="sourceLineNo">218</span>      }<a name="line.218"></a>
+<span class="sourceLineNo">219</span>    }<a name="line.219"></a>
+<span class="sourceLineNo">220</span>  }<a name="line.220"></a>
+<span class="sourceLineNo">221</span><a name="line.221"></a>
+<span class="sourceLineNo">222</span>  @Before<a name="line.222"></a>
+<span class="sourceLineNo">223</span>  public void setUp() throws IOException {<a name="line.223"></a>
+<span class="sourceLineNo">224</span>    conf = TEST_UTIL.getConfiguration();<a name="line.224"></a>
+<span class="sourceLineNo">225</span>    this.conf.set("dfs.datanode.data.dir.perm", "700");<a name="line.225"></a>
+<span class="sourceLineNo">226</span>    conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, INDEX_BLOCK_SIZE);<a name="line.226"></a>
+<span class="sourceLineNo">227</span>    conf.setInt(BloomFilterFactory.IO_STOREFILE_BLOOM_BLOCK_SIZE,<a name="line.227"></a>
+<span class="sourceLineNo">228</span>        BLOOM_BLOCK_SIZE);<a name="line.228"></a>
+<span class="sourceLineNo">229</span>    conf.setBoolean(CacheConfig.CACHE_DATA_BLOCKS_COMPRESSED_KEY, cacheCompressedData);<a name="line.229"></a>
+<span class="sourceLineNo">230</span>    cowType.modifyConf(conf);<a name="line.230"></a>
+<span class="sourceLineNo">231</span>    fs = HFileSystem.get(conf);<a name="line.231"></a>
 <span class="sourceLineNo">232</span>    cacheConf =<a name="line.232"></a>
 <span class="sourceLineNo">233</span>        new CacheConfig(blockCache, true, true, cowType.shouldBeCached(BlockType.DATA),<a name="line.233"></a>
 <span class="sourceLineNo">234</span>        cowType.shouldBeCached(BlockType.LEAF_INDEX),<a name="line.234"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.html
index 4cd4f1b..21a78bc 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.html
@@ -168,75 +168,75 @@
 <span class="sourceLineNo">160</span>    Configuration conf = TEST_UTIL.getConfiguration();<a name="line.160"></a>
 <span class="sourceLineNo">161</span>    List&lt;BlockCache&gt; blockcaches = new ArrayList&lt;&gt;();<a name="line.161"></a>
 <span class="sourceLineNo">162</span>    // default<a name="line.162"></a>
-<span class="sourceLineNo">163</span>    blockcaches.add(new CacheConfig(conf).getBlockCache());<a name="line.163"></a>
-<span class="sourceLineNo">164</span><a name="line.164"></a>
-<span class="sourceLineNo">165</span>    //set LruBlockCache.LRU_HARD_CAPACITY_LIMIT_FACTOR_CONFIG_NAME to 2.0f due to HBASE-16287<a name="line.165"></a>
-<span class="sourceLineNo">166</span>    TEST_UTIL.getConfiguration().setFloat(LruBlockCache.LRU_HARD_CAPACITY_LIMIT_FACTOR_CONFIG_NAME, 2.0f);<a name="line.166"></a>
-<span class="sourceLineNo">167</span>    // memory<a name="line.167"></a>
-<span class="sourceLineNo">168</span>    BlockCache lru = new LruBlockCache(128 * 1024 * 1024, 64 * 1024, TEST_UTIL.getConfiguration());<a name="line.168"></a>
-<span class="sourceLineNo">169</span>    blockcaches.add(lru);<a name="line.169"></a>
-<span class="sourceLineNo">170</span><a name="line.170"></a>
-<span class="sourceLineNo">171</span>    // bucket cache<a name="line.171"></a>
-<span class="sourceLineNo">172</span>    FileSystem.get(conf).mkdirs(TEST_UTIL.getDataTestDir());<a name="line.172"></a>
-<span class="sourceLineNo">173</span>    int[] bucketSizes =<a name="line.173"></a>
-<span class="sourceLineNo">174</span>        { INDEX_BLOCK_SIZE, DATA_BLOCK_SIZE, BLOOM_BLOCK_SIZE, 64 * 1024, 128 * 1024 };<a name="line.174"></a>
-<span class="sourceLineNo">175</span>    BlockCache bucketcache =<a name="line.175"></a>
-<span class="sourceLineNo">176</span>        new BucketCache("offheap", 128 * 1024 * 1024, 64 * 1024, bucketSizes, 5, 64 * 100, null);<a name="line.176"></a>
-<span class="sourceLineNo">177</span>    blockcaches.add(bucketcache);<a name="line.177"></a>
-<span class="sourceLineNo">178</span>    return blockcaches;<a name="line.178"></a>
-<span class="sourceLineNo">179</span>  }<a name="line.179"></a>
-<span class="sourceLineNo">180</span><a name="line.180"></a>
-<span class="sourceLineNo">181</span>  @Parameters<a name="line.181"></a>
-<span class="sourceLineNo">182</span>  public static Collection&lt;Object[]&gt; getParameters() throws IOException {<a name="line.182"></a>
-<span class="sourceLineNo">183</span>    List&lt;Object[]&gt; params = new ArrayList&lt;&gt;();<a name="line.183"></a>
-<span class="sourceLineNo">184</span>    for (BlockCache blockCache : getBlockCaches()) {<a name="line.184"></a>
-<span class="sourceLineNo">185</span>      for (CacheOnWriteType cowType : CacheOnWriteType.values()) {<a name="line.185"></a>
-<span class="sourceLineNo">186</span>        for (Compression.Algorithm compress : HBaseCommonTestingUtility.COMPRESSION_ALGORITHMS) {<a name="line.186"></a>
-<span class="sourceLineNo">187</span>          for (boolean cacheCompressedData : new boolean[] { false, true }) {<a name="line.187"></a>
-<span class="sourceLineNo">188</span>            params.add(new Object[] { cowType, compress, cacheCompressedData, blockCache });<a name="line.188"></a>
-<span class="sourceLineNo">189</span>          }<a name="line.189"></a>
-<span class="sourceLineNo">190</span>        }<a name="line.190"></a>
-<span class="sourceLineNo">191</span>      }<a name="line.191"></a>
-<span class="sourceLineNo">192</span>    }<a name="line.192"></a>
-<span class="sourceLineNo">193</span>    return params;<a name="line.193"></a>
-<span class="sourceLineNo">194</span>  }<a name="line.194"></a>
-<span class="sourceLineNo">195</span><a name="line.195"></a>
-<span class="sourceLineNo">196</span>  private void clearBlockCache(BlockCache blockCache) throws InterruptedException {<a name="line.196"></a>
-<span class="sourceLineNo">197</span>    if (blockCache instanceof LruBlockCache) {<a name="line.197"></a>
-<span class="sourceLineNo">198</span>      ((LruBlockCache) blockCache).clearCache();<a name="line.198"></a>
-<span class="sourceLineNo">199</span>    } else {<a name="line.199"></a>
-<span class="sourceLineNo">200</span>      // BucketCache may not return all cached blocks(blocks in write queue), so check it here.<a name="line.200"></a>
-<span class="sourceLineNo">201</span>      for (int clearCount = 0; blockCache.getBlockCount() &gt; 0; clearCount++) {<a name="line.201"></a>
-<span class="sourceLineNo">202</span>        if (clearCount &gt; 0) {<a name="line.202"></a>
-<span class="sourceLineNo">203</span>          LOG.warn("clear block cache " + blockCache + " " + clearCount + " times, "<a name="line.203"></a>
-<span class="sourceLineNo">204</span>              + blockCache.getBlockCount() + " blocks remaining");<a name="line.204"></a>
-<span class="sourceLineNo">205</span>          Thread.sleep(10);<a name="line.205"></a>
-<span class="sourceLineNo">206</span>        }<a name="line.206"></a>
-<span class="sourceLineNo">207</span>        for (CachedBlock block : Lists.newArrayList(blockCache)) {<a name="line.207"></a>
-<span class="sourceLineNo">208</span>          BlockCacheKey key = new BlockCacheKey(block.getFilename(), block.getOffset());<a name="line.208"></a>
-<span class="sourceLineNo">209</span>          // CombinedBucketCache may need evict two times.<a name="line.209"></a>
-<span class="sourceLineNo">210</span>          for (int evictCount = 0; blockCache.evictBlock(key); evictCount++) {<a name="line.210"></a>
-<span class="sourceLineNo">211</span>            if (evictCount &gt; 1) {<a name="line.211"></a>
-<span class="sourceLineNo">212</span>              LOG.warn("evict block " + block + " in " + blockCache + " " + evictCount<a name="line.212"></a>
-<span class="sourceLineNo">213</span>                  + " times, maybe a bug here");<a name="line.213"></a>
-<span class="sourceLineNo">214</span>            }<a name="line.214"></a>
-<span class="sourceLineNo">215</span>          }<a name="line.215"></a>
-<span class="sourceLineNo">216</span>        }<a name="line.216"></a>
-<span class="sourceLineNo">217</span>      }<a name="line.217"></a>
-<span class="sourceLineNo">218</span>    }<a name="line.218"></a>
-<span class="sourceLineNo">219</span>  }<a name="line.219"></a>
-<span class="sourceLineNo">220</span><a name="line.220"></a>
-<span class="sourceLineNo">221</span>  @Before<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  public void setUp() throws IOException {<a name="line.222"></a>
-<span class="sourceLineNo">223</span>    conf = TEST_UTIL.getConfiguration();<a name="line.223"></a>
-<span class="sourceLineNo">224</span>    this.conf.set("dfs.datanode.data.dir.perm", "700");<a name="line.224"></a>
-<span class="sourceLineNo">225</span>    conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, INDEX_BLOCK_SIZE);<a name="line.225"></a>
-<span class="sourceLineNo">226</span>    conf.setInt(BloomFilterFactory.IO_STOREFILE_BLOOM_BLOCK_SIZE,<a name="line.226"></a>
-<span class="sourceLineNo">227</span>        BLOOM_BLOCK_SIZE);<a name="line.227"></a>
-<span class="sourceLineNo">228</span>    conf.setBoolean(CacheConfig.CACHE_DATA_BLOCKS_COMPRESSED_KEY, cacheCompressedData);<a name="line.228"></a>
-<span class="sourceLineNo">229</span>    cowType.modifyConf(conf);<a name="line.229"></a>
-<span class="sourceLineNo">230</span>    fs = HFileSystem.get(conf);<a name="line.230"></a>
-<span class="sourceLineNo">231</span>    CacheConfig.GLOBAL_BLOCK_CACHE_INSTANCE = blockCache;<a name="line.231"></a>
+<span class="sourceLineNo">163</span>    CacheConfig.instantiateBlockCache(conf);<a name="line.163"></a>
+<span class="sourceLineNo">164</span>    blockcaches.add(new CacheConfig(conf).getBlockCache());<a name="line.164"></a>
+<span class="sourceLineNo">165</span><a name="line.165"></a>
+<span class="sourceLineNo">166</span>    //set LruBlockCache.LRU_HARD_CAPACITY_LIMIT_FACTOR_CONFIG_NAME to 2.0f due to HBASE-16287<a name="line.166"></a>
+<span class="sourceLineNo">167</span>    TEST_UTIL.getConfiguration().setFloat(LruBlockCache.LRU_HARD_CAPACITY_LIMIT_FACTOR_CONFIG_NAME, 2.0f);<a name="line.167"></a>
+<span class="sourceLineNo">168</span>    // memory<a name="line.168"></a>
+<span class="sourceLineNo">169</span>    BlockCache lru = new LruBlockCache(128 * 1024 * 1024, 64 * 1024, TEST_UTIL.getConfiguration());<a name="line.169"></a>
+<span class="sourceLineNo">170</span>    blockcaches.add(lru);<a name="line.170"></a>
+<span class="sourceLineNo">171</span><a name="line.171"></a>
+<span class="sourceLineNo">172</span>    // bucket cache<a name="line.172"></a>
+<span class="sourceLineNo">173</span>    FileSystem.get(conf).mkdirs(TEST_UTIL.getDataTestDir());<a name="line.173"></a>
+<span class="sourceLineNo">174</span>    int[] bucketSizes =<a name="line.174"></a>
+<span class="sourceLineNo">175</span>        { INDEX_BLOCK_SIZE, DATA_BLOCK_SIZE, BLOOM_BLOCK_SIZE, 64 * 1024, 128 * 1024 };<a name="line.175"></a>
+<span class="sourceLineNo">176</span>    BlockCache bucketcache =<a name="line.176"></a>
+<span class="sourceLineNo">177</span>        new BucketCache("offheap", 128 * 1024 * 1024, 64 * 1024, bucketSizes, 5, 64 * 100, null);<a name="line.177"></a>
+<span class="sourceLineNo">178</span>    blockcaches.add(bucketcache);<a name="line.178"></a>
+<span class="sourceLineNo">179</span>    return blockcaches;<a name="line.179"></a>
+<span class="sourceLineNo">180</span>  }<a name="line.180"></a>
+<span class="sourceLineNo">181</span><a name="line.181"></a>
+<span class="sourceLineNo">182</span>  @Parameters<a name="line.182"></a>
+<span class="sourceLineNo">183</span>  public static Collection&lt;Object[]&gt; getParameters() throws IOException {<a name="line.183"></a>
+<span class="sourceLineNo">184</span>    List&lt;Object[]&gt; params = new ArrayList&lt;&gt;();<a name="line.184"></a>
+<span class="sourceLineNo">185</span>    for (BlockCache blockCache : getBlockCaches()) {<a name="line.185"></a>
+<span class="sourceLineNo">186</span>      for (CacheOnWriteType cowType : CacheOnWriteType.values()) {<a name="line.186"></a>
+<span class="sourceLineNo">187</span>        for (Compression.Algorithm compress : HBaseCommonTestingUtility.COMPRESSION_ALGORITHMS) {<a name="line.187"></a>
+<span class="sourceLineNo">188</span>          for (boolean cacheCompressedData : new boolean[] { false, true }) {<a name="line.188"></a>
+<span class="sourceLineNo">189</span>            params.add(new Object[] { cowType, compress, cacheCompressedData, blockCache });<a name="line.189"></a>
+<span class="sourceLineNo">190</span>          }<a name="line.190"></a>
+<span class="sourceLineNo">191</span>        }<a name="line.191"></a>
+<span class="sourceLineNo">192</span>      }<a name="line.192"></a>
+<span class="sourceLineNo">193</span>    }<a name="line.193"></a>
+<span class="sourceLineNo">194</span>    return params;<a name="line.194"></a>
+<span class="sourceLineNo">195</span>  }<a name="line.195"></a>
+<span class="sourceLineNo">196</span><a name="line.196"></a>
+<span class="sourceLineNo">197</span>  private void clearBlockCache(BlockCache blockCache) throws InterruptedException {<a name="line.197"></a>
+<span class="sourceLineNo">198</span>    if (blockCache instanceof LruBlockCache) {<a name="line.198"></a>
+<span class="sourceLineNo">199</span>      ((LruBlockCache) blockCache).clearCache();<a name="line.199"></a>
+<span class="sourceLineNo">200</span>    } else {<a name="line.200"></a>
+<span class="sourceLineNo">201</span>      // BucketCache may not return all cached blocks(blocks in write queue), so check it here.<a name="line.201"></a>
+<span class="sourceLineNo">202</span>      for (int clearCount = 0; blockCache.getBlockCount() &gt; 0; clearCount++) {<a name="line.202"></a>
+<span class="sourceLineNo">203</span>        if (clearCount &gt; 0) {<a name="line.203"></a>
+<span class="sourceLineNo">204</span>          LOG.warn("clear block cache " + blockCache + " " + clearCount + " times, "<a name="line.204"></a>
+<span class="sourceLineNo">205</span>              + blockCache.getBlockCount() + " blocks remaining");<a name="line.205"></a>
+<span class="sourceLineNo">206</span>          Thread.sleep(10);<a name="line.206"></a>
+<span class="sourceLineNo">207</span>        }<a name="line.207"></a>
+<span class="sourceLineNo">208</span>        for (CachedBlock block : Lists.newArrayList(blockCache)) {<a name="line.208"></a>
+<span class="sourceLineNo">209</span>          BlockCacheKey key = new BlockCacheKey(block.getFilename(), block.getOffset());<a name="line.209"></a>
+<span class="sourceLineNo">210</span>          // CombinedBucketCache may need evict two times.<a name="line.210"></a>
+<span class="sourceLineNo">211</span>          for (int evictCount = 0; blockCache.evictBlock(key); evictCount++) {<a name="line.211"></a>
+<span class="sourceLineNo">212</span>            if (evictCount &gt; 1) {<a name="line.212"></a>
+<span class="sourceLineNo">213</span>              LOG.warn("evict block " + block + " in " + blockCache + " " + evictCount<a name="line.213"></a>
+<span class="sourceLineNo">214</span>                  + " times, maybe a bug here");<a name="line.214"></a>
+<span class="sourceLineNo">215</span>            }<a name="line.215"></a>
+<span class="sourceLineNo">216</span>          }<a name="line.216"></a>
+<span class="sourceLineNo">217</span>        }<a name="line.217"></a>
+<span class="sourceLineNo">218</span>      }<a name="line.218"></a>
+<span class="sourceLineNo">219</span>    }<a name="line.219"></a>
+<span class="sourceLineNo">220</span>  }<a name="line.220"></a>
+<span class="sourceLineNo">221</span><a name="line.221"></a>
+<span class="sourceLineNo">222</span>  @Before<a name="line.222"></a>
+<span class="sourceLineNo">223</span>  public void setUp() throws IOException {<a name="line.223"></a>
+<span class="sourceLineNo">224</span>    conf = TEST_UTIL.getConfiguration();<a name="line.224"></a>
+<span class="sourceLineNo">225</span>    this.conf.set("dfs.datanode.data.dir.perm", "700");<a name="line.225"></a>
+<span class="sourceLineNo">226</span>    conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, INDEX_BLOCK_SIZE);<a name="line.226"></a>
+<span class="sourceLineNo">227</span>    conf.setInt(BloomFilterFactory.IO_STOREFILE_BLOOM_BLOCK_SIZE,<a name="line.227"></a>
+<span class="sourceLineNo">228</span>        BLOOM_BLOCK_SIZE);<a name="line.228"></a>
+<span class="sourceLineNo">229</span>    conf.setBoolean(CacheConfig.CACHE_DATA_BLOCKS_COMPRESSED_KEY, cacheCompressedData);<a name="line.229"></a>
+<span class="sourceLineNo">230</span>    cowType.modifyConf(conf);<a name="line.230"></a>
+<span class="sourceLineNo">231</span>    fs = HFileSystem.get(conf);<a name="line.231"></a>
 <span class="sourceLineNo">232</span>    cacheConf =<a name="line.232"></a>
 <span class="sourceLineNo">233</span>        new CacheConfig(blockCache, true, true, cowType.shouldBeCached(BlockType.DATA),<a name="line.233"></a>
 <span class="sourceLineNo">234</span>        cowType.shouldBeCached(BlockType.LEAF_INDEX),<a name="line.234"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.html
index b16e1c7..169b747 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.html
@@ -114,50 +114,51 @@
 <span class="sourceLineNo">106</span>    // Make sure we make a new one each time.<a name="line.106"></a>
 <span class="sourceLineNo">107</span>    CacheConfig.clearGlobalInstances();<a name="line.107"></a>
 <span class="sourceLineNo">108</span>    HFile.DATABLOCK_READ_COUNT.reset();<a name="line.108"></a>
-<span class="sourceLineNo">109</span>  }<a name="line.109"></a>
-<span class="sourceLineNo">110</span><a name="line.110"></a>
-<span class="sourceLineNo">111</span>  @Test<a name="line.111"></a>
-<span class="sourceLineNo">112</span>  public void testCacheBlocks() throws IOException {<a name="line.112"></a>
-<span class="sourceLineNo">113</span>    // Set index block size to be the same as normal block size.<a name="line.113"></a>
-<span class="sourceLineNo">114</span>    TEST_UTIL.getConfiguration().setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, BLOCK_SIZE);<a name="line.114"></a>
-<span class="sourceLineNo">115</span>    HColumnDescriptor hcd = new HColumnDescriptor(Bytes.toBytes(CF)).setMaxVersions(MAX_VERSIONS).<a name="line.115"></a>
-<span class="sourceLineNo">116</span>      setCompressionType(COMPRESSION_ALGORITHM).<a name="line.116"></a>
-<span class="sourceLineNo">117</span>      setBloomFilterType(BLOOM_TYPE);<a name="line.117"></a>
-<span class="sourceLineNo">118</span>    hcd.setBlocksize(BLOCK_SIZE);<a name="line.118"></a>
-<span class="sourceLineNo">119</span>    hcd.setBlockCacheEnabled(cfCacheEnabled);<a name="line.119"></a>
-<span class="sourceLineNo">120</span>    HRegion region = TEST_UTIL.createTestRegion(TABLE, hcd);<a name="line.120"></a>
-<span class="sourceLineNo">121</span>    BlockCache cache = region.getStore(hcd.getName()).getCacheConfig().getBlockCache();<a name="line.121"></a>
-<span class="sourceLineNo">122</span>    CacheStats stats = cache.getStats();<a name="line.122"></a>
-<span class="sourceLineNo">123</span>    writeTestData(region);<a name="line.123"></a>
-<span class="sourceLineNo">124</span>    assertEquals(0, stats.getHitCount());<a name="line.124"></a>
-<span class="sourceLineNo">125</span>    assertEquals(0, HFile.DATABLOCK_READ_COUNT.sum());<a name="line.125"></a>
-<span class="sourceLineNo">126</span>    // Do a single get, take count of caches.  If we are NOT caching DATA blocks, the miss<a name="line.126"></a>
-<span class="sourceLineNo">127</span>    // count should go up.  Otherwise, all should be cached and the miss count should not rise.<a name="line.127"></a>
-<span class="sourceLineNo">128</span>    region.get(new Get(Bytes.toBytes("row" + 0)));<a name="line.128"></a>
-<span class="sourceLineNo">129</span>    assertTrue(stats.getHitCount() &gt; 0);<a name="line.129"></a>
-<span class="sourceLineNo">130</span>    assertTrue(HFile.DATABLOCK_READ_COUNT.sum() &gt; 0);<a name="line.130"></a>
-<span class="sourceLineNo">131</span>    long missCount = stats.getMissCount();<a name="line.131"></a>
-<span class="sourceLineNo">132</span>    region.get(new Get(Bytes.toBytes("row" + 0)));<a name="line.132"></a>
-<span class="sourceLineNo">133</span>    if (this.cfCacheEnabled) assertEquals(missCount, stats.getMissCount());<a name="line.133"></a>
-<span class="sourceLineNo">134</span>    else assertTrue(stats.getMissCount() &gt; missCount);<a name="line.134"></a>
-<span class="sourceLineNo">135</span>  }<a name="line.135"></a>
-<span class="sourceLineNo">136</span><a name="line.136"></a>
-<span class="sourceLineNo">137</span>  private void writeTestData(HRegion region) throws IOException {<a name="line.137"></a>
-<span class="sourceLineNo">138</span>    for (int i = 0; i &lt; NUM_ROWS; ++i) {<a name="line.138"></a>
-<span class="sourceLineNo">139</span>      Put put = new Put(Bytes.toBytes("row" + i));<a name="line.139"></a>
-<span class="sourceLineNo">140</span>      for (int j = 0; j &lt; NUM_COLS_PER_ROW; ++j) {<a name="line.140"></a>
-<span class="sourceLineNo">141</span>        for (long ts = 1; ts &lt; NUM_TIMESTAMPS_PER_COL; ++ts) {<a name="line.141"></a>
-<span class="sourceLineNo">142</span>          put.addColumn(CF_BYTES, Bytes.toBytes("col" + j), ts,<a name="line.142"></a>
-<span class="sourceLineNo">143</span>                  Bytes.toBytes("value" + i + "_" + j + "_" + ts));<a name="line.143"></a>
-<span class="sourceLineNo">144</span>        }<a name="line.144"></a>
-<span class="sourceLineNo">145</span>      }<a name="line.145"></a>
-<span class="sourceLineNo">146</span>      region.put(put);<a name="line.146"></a>
-<span class="sourceLineNo">147</span>      if ((i + 1) % ROWS_PER_HFILE == 0) {<a name="line.147"></a>
-<span class="sourceLineNo">148</span>        region.flush(true);<a name="line.148"></a>
-<span class="sourceLineNo">149</span>      }<a name="line.149"></a>
-<span class="sourceLineNo">150</span>    }<a name="line.150"></a>
-<span class="sourceLineNo">151</span>  }<a name="line.151"></a>
-<span class="sourceLineNo">152</span>}<a name="line.152"></a>
+<span class="sourceLineNo">109</span>    CacheConfig.instantiateBlockCache(TEST_UTIL.getConfiguration());<a name="line.109"></a>
+<span class="sourceLineNo">110</span>  }<a name="line.110"></a>
+<span class="sourceLineNo">111</span><a name="line.111"></a>
+<span class="sourceLineNo">112</span>  @Test<a name="line.112"></a>
+<span class="sourceLineNo">113</span>  public void testCacheBlocks() throws IOException {<a name="line.113"></a>
+<span class="sourceLineNo">114</span>    // Set index block size to be the same as normal block size.<a name="line.114"></a>
+<span class="sourceLineNo">115</span>    TEST_UTIL.getConfiguration().setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, BLOCK_SIZE);<a name="line.115"></a>
+<span class="sourceLineNo">116</span>    HColumnDescriptor hcd = new HColumnDescriptor(Bytes.toBytes(CF)).setMaxVersions(MAX_VERSIONS).<a name="line.116"></a>
+<span class="sourceLineNo">117</span>      setCompressionType(COMPRESSION_ALGORITHM).<a name="line.117"></a>
+<span class="sourceLineNo">118</span>      setBloomFilterType(BLOOM_TYPE);<a name="line.118"></a>
+<span class="sourceLineNo">119</span>    hcd.setBlocksize(BLOCK_SIZE);<a name="line.119"></a>
+<span class="sourceLineNo">120</span>    hcd.setBlockCacheEnabled(cfCacheEnabled);<a name="line.120"></a>
+<span class="sourceLineNo">121</span>    HRegion region = TEST_UTIL.createTestRegion(TABLE, hcd);<a name="line.121"></a>
+<span class="sourceLineNo">122</span>    BlockCache cache = region.getStore(hcd.getName()).getCacheConfig().getBlockCache();<a name="line.122"></a>
+<span class="sourceLineNo">123</span>    CacheStats stats = cache.getStats();<a name="line.123"></a>
+<span class="sourceLineNo">124</span>    writeTestData(region);<a name="line.124"></a>
+<span class="sourceLineNo">125</span>    assertEquals(0, stats.getHitCount());<a name="line.125"></a>
+<span class="sourceLineNo">126</span>    assertEquals(0, HFile.DATABLOCK_READ_COUNT.sum());<a name="line.126"></a>
+<span class="sourceLineNo">127</span>    // Do a single get, take count of caches.  If we are NOT caching DATA blocks, the miss<a name="line.127"></a>
+<span class="sourceLineNo">128</span>    // count should go up.  Otherwise, all should be cached and the miss count should not rise.<a name="line.128"></a>
+<span class="sourceLineNo">129</span>    region.get(new Get(Bytes.toBytes("row" + 0)));<a name="line.129"></a>
+<span class="sourceLineNo">130</span>    assertTrue(stats.getHitCount() &gt; 0);<a name="line.130"></a>
+<span class="sourceLineNo">131</span>    assertTrue(HFile.DATABLOCK_READ_COUNT.sum() &gt; 0);<a name="line.131"></a>
+<span class="sourceLineNo">132</span>    long missCount = stats.getMissCount();<a name="line.132"></a>
+<span class="sourceLineNo">133</span>    region.get(new Get(Bytes.toBytes("row" + 0)));<a name="line.133"></a>
+<span class="sourceLineNo">134</span>    if (this.cfCacheEnabled) assertEquals(missCount, stats.getMissCount());<a name="line.134"></a>
+<span class="sourceLineNo">135</span>    else assertTrue(stats.getMissCount() &gt; missCount);<a name="line.135"></a>
+<span class="sourceLineNo">136</span>  }<a name="line.136"></a>
+<span class="sourceLineNo">137</span><a name="line.137"></a>
+<span class="sourceLineNo">138</span>  private void writeTestData(HRegion region) throws IOException {<a name="line.138"></a>
+<span class="sourceLineNo">139</span>    for (int i = 0; i &lt; NUM_ROWS; ++i) {<a name="line.139"></a>
+<span class="sourceLineNo">140</span>      Put put = new Put(Bytes.toBytes("row" + i));<a name="line.140"></a>
+<span class="sourceLineNo">141</span>      for (int j = 0; j &lt; NUM_COLS_PER_ROW; ++j) {<a name="line.141"></a>
+<span class="sourceLineNo">142</span>        for (long ts = 1; ts &lt; NUM_TIMESTAMPS_PER_COL; ++ts) {<a name="line.142"></a>
+<span class="sourceLineNo">143</span>          put.addColumn(CF_BYTES, Bytes.toBytes("col" + j), ts,<a name="line.143"></a>
+<span class="sourceLineNo">144</span>                  Bytes.toBytes("value" + i + "_" + j + "_" + ts));<a name="line.144"></a>
+<span class="sourceLineNo">145</span>        }<a name="line.145"></a>
+<span class="sourceLineNo">146</span>      }<a name="line.146"></a>
+<span class="sourceLineNo">147</span>      region.put(put);<a name="line.147"></a>
+<span class="sourceLineNo">148</span>      if ((i + 1) % ROWS_PER_HFILE == 0) {<a name="line.148"></a>
+<span class="sourceLineNo">149</span>        region.flush(true);<a name="line.149"></a>
+<span class="sourceLineNo">150</span>      }<a name="line.150"></a>
+<span class="sourceLineNo">151</span>    }<a name="line.151"></a>
+<span class="sourceLineNo">152</span>  }<a name="line.152"></a>
+<span class="sourceLineNo">153</span>}<a name="line.153"></a>
 
 
 


[26/26] hbase-site git commit: Published site at 6f15cecaed2f1f76bfe1880b7c578ed369daa5d5.

Posted by gi...@apache.org.
Published site at 6f15cecaed2f1f76bfe1880b7c578ed369daa5d5.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/dccdd274
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/dccdd274
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/dccdd274

Branch: refs/heads/asf-site
Commit: dccdd274369b2d20af71113a816c31993c914907
Parents: 3d72455
Author: jenkins <bu...@apache.org>
Authored: Tue Nov 27 14:52:17 2018 +0000
Committer: jenkins <bu...@apache.org>
Committed: Tue Nov 27 14:52:17 2018 +0000

----------------------------------------------------------------------
 acid-semantics.html                             |     4 +-
 apache_hbase_reference_guide.pdf                |     4 +-
 book.html                                       |     2 +-
 bulk-loads.html                                 |     4 +-
 checkstyle-aggregate.html                       | 27422 ++++++++---------
 checkstyle.rss                                  |    10 +-
 coc.html                                        |     4 +-
 dependencies.html                               |     4 +-
 dependency-convergence.html                     |     4 +-
 dependency-info.html                            |     4 +-
 dependency-management.html                      |     4 +-
 devapidocs/constant-values.html                 |     4 +-
 devapidocs/index-all.html                       |     6 -
 .../hadoop/hbase/backup/package-tree.html       |     2 +-
 .../hadoop/hbase/client/package-tree.html       |    24 +-
 .../hadoop/hbase/coprocessor/package-tree.html  |     2 +-
 .../hadoop/hbase/filter/package-tree.html       |    10 +-
 .../hadoop/hbase/io/hfile/CacheConfig.html      |   114 +-
 .../hadoop/hbase/io/hfile/package-tree.html     |     4 +-
 .../apache/hadoop/hbase/ipc/package-tree.html   |     2 +-
 .../hadoop/hbase/mapreduce/package-tree.html    |     2 +-
 .../hbase/master/balancer/package-tree.html     |     2 +-
 .../hadoop/hbase/master/package-tree.html       |     4 +-
 .../hbase/master/procedure/package-tree.html    |     2 +-
 .../apache/hadoop/hbase/mob/MobCacheConfig.html |    20 +-
 .../hadoop/hbase/monitoring/package-tree.html   |     2 +-
 .../org/apache/hadoop/hbase/package-tree.html   |    18 +-
 .../hadoop/hbase/procedure2/package-tree.html   |     4 +-
 .../hadoop/hbase/quotas/package-tree.html       |     8 +-
 .../AbstractMultiFileWriter.WriterFactory.html  |     4 +-
 .../regionserver/AbstractMultiFileWriter.html   |    24 +-
 .../HRegionServer.CompactionChecker.html        |    14 +-
 .../HRegionServer.MovedRegionInfo.html          |    16 +-
 .../HRegionServer.MovedRegionsCleaner.html      |    16 +-
 .../HRegionServer.PeriodicMemStoreFlusher.html  |    14 +-
 ...RegionServer.SystemExitWhenAbortTimeout.html |     6 +-
 .../hbase/regionserver/HRegionServer.html       |   350 +-
 .../hadoop/hbase/regionserver/package-tree.html |    20 +-
 .../regionserver/querymatcher/package-tree.html |     2 +-
 .../hbase/regionserver/wal/package-tree.html    |     2 +-
 .../hadoop/hbase/replication/package-tree.html  |     2 +-
 .../replication/regionserver/package-tree.html  |     2 +-
 .../hadoop/hbase/rest/model/package-tree.html   |     2 +-
 .../hbase/security/access/package-tree.html     |     2 +-
 .../hadoop/hbase/security/package-tree.html     |     4 +-
 .../hadoop/hbase/thrift/package-tree.html       |     2 +-
 .../apache/hadoop/hbase/util/package-tree.html  |     8 +-
 .../org/apache/hadoop/hbase/Version.html        |     4 +-
 .../hfile/CacheConfig.ExternalBlockCaches.html  |   862 +-
 .../hadoop/hbase/io/hfile/CacheConfig.html      |   862 +-
 .../apache/hadoop/hbase/mob/MobCacheConfig.html |    45 +-
 .../AbstractMultiFileWriter.WriterFactory.html  |   207 +-
 .../regionserver/AbstractMultiFileWriter.html   |   207 +-
 .../HRegionServer.CompactionChecker.html        |  6233 ++--
 .../HRegionServer.MovedRegionInfo.html          |  6233 ++--
 .../HRegionServer.MovedRegionsCleaner.html      |  6233 ++--
 .../HRegionServer.PeriodicMemStoreFlusher.html  |  6233 ++--
 ...RegionServer.SystemExitWhenAbortTimeout.html |  6233 ++--
 .../hbase/regionserver/HRegionServer.html       |  6233 ++--
 downloads.html                                  |     4 +-
 export_control.html                             |     4 +-
 index.html                                      |     4 +-
 integration.html                                |     4 +-
 issue-tracking.html                             |     4 +-
 license.html                                    |     4 +-
 mail-lists.html                                 |     4 +-
 metrics.html                                    |     4 +-
 old_news.html                                   |     4 +-
 plugin-management.html                          |     4 +-
 plugins.html                                    |     4 +-
 poweredbyhbase.html                             |     4 +-
 project-info.html                               |     4 +-
 project-reports.html                            |     4 +-
 project-summary.html                            |     4 +-
 pseudo-distributed.html                         |     4 +-
 replication.html                                |     4 +-
 resources.html                                  |     4 +-
 source-repository.html                          |     4 +-
 sponsors.html                                   |     4 +-
 supportingprojects.html                         |     4 +-
 team-list.html                                  |     4 +-
 .../hbase/io/encoding/TestEncodedSeekers.html   |     4 +-
 .../hbase/io/hfile/TestBlockCacheReporting.html |    46 +-
 .../hadoop/hbase/io/hfile/TestCacheConfig.html  |    12 +-
 .../hadoop/hbase/io/hfile/TestCacheOnWrite.html |     6 +-
 .../io/hfile/TestForceCacheImportantBlocks.html |     4 +-
 .../hbase/io/hfile/TestHFileBlockIndex.html     |    14 +-
 .../hadoop/hbase/io/hfile/TestPrefetch.html     |    14 +-
 .../io/hfile/TestScannerFromBucketCache.html    |    20 +-
 .../hbase/regionserver/TestBlocksRead.html      |    34 +-
 .../regionserver/TestCacheOnWriteInSchema.html  |    10 +-
 .../regionserver/TestCompoundBloomFilter.html   |    18 +-
 .../hbase/regionserver/TestHStoreFile.html      |     2 +-
 .../hbase/regionserver/TestRecoveredEdits.html  |    18 +-
 .../hbase/io/encoding/TestEncodedSeekers.html   |   165 +-
 .../hbase/io/hfile/TestBlockCacheReporting.html |   234 +-
 .../hfile/TestCacheConfig.DataCacheEntry.html   |   360 +-
 .../io/hfile/TestCacheConfig.Deserializer.html  |   360 +-
 .../hfile/TestCacheConfig.IndexCacheEntry.html  |   360 +-
 .../hfile/TestCacheConfig.MetaCacheEntry.html   |   360 +-
 .../hadoop/hbase/io/hfile/TestCacheConfig.html  |   360 +-
 .../TestCacheOnWrite.CacheOnWriteType.html      |   138 +-
 .../hadoop/hbase/io/hfile/TestCacheOnWrite.html |   138 +-
 .../io/hfile/TestForceCacheImportantBlocks.html |    89 +-
 .../TestHFileBlockIndex.BlockReaderWrapper.html |   517 +-
 .../hbase/io/hfile/TestHFileBlockIndex.html     |   517 +-
 .../hadoop/hbase/io/hfile/TestPrefetch.html     |   233 +-
 .../io/hfile/TestScannerFromBucketCache.html    |   495 +-
 .../TestScannerSelectionUsingKeyRange.html      |    29 +-
 .../io/hfile/TestScannerSelectionUsingTTL.html  |   123 +-
 .../hbase/regionserver/TestBlocksRead.html      |   739 +-
 .../hbase/regionserver/TestBlocksScanned.html   |     4 +-
 ...stCacheOnWriteInSchema.CacheOnWriteType.html |   263 +-
 .../regionserver/TestCacheOnWriteInSchema.html  |   263 +-
 .../regionserver/TestCompoundBloomFilter.html   |   473 +-
 .../hbase/regionserver/TestHStoreFile.html      |    22 +-
 .../hbase/regionserver/TestRecoveredEdits.html  |   340 +-
 117 files changed, 37151 insertions(+), 37492 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/acid-semantics.html
----------------------------------------------------------------------
diff --git a/acid-semantics.html b/acid-semantics.html
index 58daaef..c6ddb89 100644
--- a/acid-semantics.html
+++ b/acid-semantics.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181126" />
+    <meta name="Date-Revision-yyyymmdd" content="20181127" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013;  
       Apache HBase (TM) ACID Properties
@@ -611,7 +611,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-11-26</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-11-27</li>
             </p>
                 </div>
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/apache_hbase_reference_guide.pdf
----------------------------------------------------------------------
diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf
index f950a7f..67319f5 100644
--- a/apache_hbase_reference_guide.pdf
+++ b/apache_hbase_reference_guide.pdf
@@ -5,8 +5,8 @@
 /Author (Apache HBase Team)
 /Creator (Asciidoctor PDF 1.5.0.alpha.15, based on Prawn 2.2.2)
 /Producer (Apache HBase Team)
-/ModDate (D:20181126143304+00'00')
-/CreationDate (D:20181126144903+00'00')
+/ModDate (D:20181127143311+00'00')
+/CreationDate (D:20181127144847+00'00')
 >>
 endobj
 2 0 obj

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/book.html
----------------------------------------------------------------------
diff --git a/book.html b/book.html
index 41ea882..98a7f8f 100644
--- a/book.html
+++ b/book.html
@@ -41318,7 +41318,7 @@ org/apache/hadoop/hbase/security/access/AccessControlClient.revoke:(Lorg/apache/
 <div id="footer">
 <div id="footer-text">
 Version 3.0.0-SNAPSHOT<br>
-Last updated 2018-11-26 14:33:04 UTC
+Last updated 2018-11-27 14:33:11 UTC
 </div>
 </div>
 </body>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/bulk-loads.html
----------------------------------------------------------------------
diff --git a/bulk-loads.html b/bulk-loads.html
index c04dd4f..f9ca695 100644
--- a/bulk-loads.html
+++ b/bulk-loads.html
@@ -7,7 +7,7 @@
   <head>
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
-    <meta name="Date-Revision-yyyymmdd" content="20181126" />
+    <meta name="Date-Revision-yyyymmdd" content="20181127" />
     <meta http-equiv="Content-Language" content="en" />
     <title>Apache HBase &#x2013;  
       Bulk Loads in Apache HBase (TM)
@@ -316,7 +316,7 @@ under the License. -->
                         <a href="https://www.apache.org/">The Apache Software Foundation</a>.
             All rights reserved.      
                     
-                  <li id="publishDate" class="pull-right">Last Published: 2018-11-26</li>
+                  <li id="publishDate" class="pull-right">Last Published: 2018-11-27</li>
             </p>
                 </div>
 


[08/26] hbase-site git commit: Published site at 6f15cecaed2f1f76bfe1880b7c578ed369daa5d5.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.IndexCacheEntry.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.IndexCacheEntry.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.IndexCacheEntry.html
index f8e34f2..ea22449 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.IndexCacheEntry.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.IndexCacheEntry.html
@@ -217,184 +217,188 @@
 <span class="sourceLineNo">209</span>  @Test<a name="line.209"></a>
 <span class="sourceLineNo">210</span>  public void testDisableCacheDataBlock() throws IOException {<a name="line.210"></a>
 <span class="sourceLineNo">211</span>    Configuration conf = HBaseConfiguration.create();<a name="line.211"></a>
-<span class="sourceLineNo">212</span>    CacheConfig cacheConfig = new CacheConfig(conf);<a name="line.212"></a>
-<span class="sourceLineNo">213</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.213"></a>
-<span class="sourceLineNo">214</span>    assertFalse(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.214"></a>
-<span class="sourceLineNo">215</span>    assertFalse(cacheConfig.shouldCacheDataCompressed());<a name="line.215"></a>
-<span class="sourceLineNo">216</span>    assertFalse(cacheConfig.shouldCacheDataOnWrite());<a name="line.216"></a>
-<span class="sourceLineNo">217</span>    assertTrue(cacheConfig.shouldCacheDataOnRead());<a name="line.217"></a>
-<span class="sourceLineNo">218</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.218"></a>
-<span class="sourceLineNo">219</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.219"></a>
-<span class="sourceLineNo">220</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.220"></a>
-<span class="sourceLineNo">221</span>    assertFalse(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.221"></a>
-<span class="sourceLineNo">222</span>    assertFalse(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.222"></a>
-<span class="sourceLineNo">223</span><a name="line.223"></a>
-<span class="sourceLineNo">224</span>    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, true);<a name="line.224"></a>
-<span class="sourceLineNo">225</span>    conf.setBoolean(CacheConfig.CACHE_DATA_BLOCKS_COMPRESSED_KEY, true);<a name="line.225"></a>
-<span class="sourceLineNo">226</span>    conf.setBoolean(CacheConfig.CACHE_BLOOM_BLOCKS_ON_WRITE_KEY, true);<a name="line.226"></a>
-<span class="sourceLineNo">227</span>    conf.setBoolean(CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY, true);<a name="line.227"></a>
-<span class="sourceLineNo">228</span><a name="line.228"></a>
-<span class="sourceLineNo">229</span>    cacheConfig = new CacheConfig(conf);<a name="line.229"></a>
-<span class="sourceLineNo">230</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.230"></a>
-<span class="sourceLineNo">231</span>    assertTrue(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.231"></a>
-<span class="sourceLineNo">232</span>    assertTrue(cacheConfig.shouldCacheDataCompressed());<a name="line.232"></a>
-<span class="sourceLineNo">233</span>    assertTrue(cacheConfig.shouldCacheDataOnWrite());<a name="line.233"></a>
-<span class="sourceLineNo">234</span>    assertTrue(cacheConfig.shouldCacheDataOnRead());<a name="line.234"></a>
-<span class="sourceLineNo">235</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.235"></a>
-<span class="sourceLineNo">236</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.236"></a>
-<span class="sourceLineNo">237</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.237"></a>
-<span class="sourceLineNo">238</span>    assertTrue(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.238"></a>
-<span class="sourceLineNo">239</span>    assertTrue(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.239"></a>
-<span class="sourceLineNo">240</span><a name="line.240"></a>
-<span class="sourceLineNo">241</span>    conf.setBoolean(CacheConfig.CACHE_DATA_ON_READ_KEY, false);<a name="line.241"></a>
-<span class="sourceLineNo">242</span>    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, false);<a name="line.242"></a>
-<span class="sourceLineNo">243</span><a name="line.243"></a>
-<span class="sourceLineNo">244</span>    cacheConfig = new CacheConfig(conf);<a name="line.244"></a>
-<span class="sourceLineNo">245</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.245"></a>
-<span class="sourceLineNo">246</span>    assertFalse(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.246"></a>
-<span class="sourceLineNo">247</span>    assertFalse(cacheConfig.shouldCacheDataCompressed());<a name="line.247"></a>
-<span class="sourceLineNo">248</span>    assertFalse(cacheConfig.shouldCacheDataOnWrite());<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    assertFalse(cacheConfig.shouldCacheDataOnRead());<a name="line.249"></a>
-<span class="sourceLineNo">250</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.250"></a>
-<span class="sourceLineNo">251</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.251"></a>
-<span class="sourceLineNo">252</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.252"></a>
-<span class="sourceLineNo">253</span>    assertTrue(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    assertTrue(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.254"></a>
-<span class="sourceLineNo">255</span><a name="line.255"></a>
-<span class="sourceLineNo">256</span>    conf.setBoolean(CacheConfig.CACHE_DATA_ON_READ_KEY, true);<a name="line.256"></a>
-<span class="sourceLineNo">257</span>    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, false);<a name="line.257"></a>
-<span class="sourceLineNo">258</span><a name="line.258"></a>
-<span class="sourceLineNo">259</span>    HColumnDescriptor family = new HColumnDescriptor("testDisableCacheDataBlock");<a name="line.259"></a>
-<span class="sourceLineNo">260</span>    family.setBlockCacheEnabled(false);<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>    cacheConfig = new CacheConfig(conf, family);<a name="line.262"></a>
-<span class="sourceLineNo">263</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    assertFalse(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.264"></a>
-<span class="sourceLineNo">265</span>    assertFalse(cacheConfig.shouldCacheDataCompressed());<a name="line.265"></a>
-<span class="sourceLineNo">266</span>    assertFalse(cacheConfig.shouldCacheDataOnWrite());<a name="line.266"></a>
-<span class="sourceLineNo">267</span>    assertFalse(cacheConfig.shouldCacheDataOnRead());<a name="line.267"></a>
-<span class="sourceLineNo">268</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.269"></a>
-<span class="sourceLineNo">270</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.270"></a>
-<span class="sourceLineNo">271</span>    assertTrue(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.271"></a>
-<span class="sourceLineNo">272</span>    assertTrue(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.272"></a>
-<span class="sourceLineNo">273</span>  }<a name="line.273"></a>
-<span class="sourceLineNo">274</span><a name="line.274"></a>
-<span class="sourceLineNo">275</span>  @Test<a name="line.275"></a>
-<span class="sourceLineNo">276</span>  public void testCacheConfigDefaultLRUBlockCache() {<a name="line.276"></a>
-<span class="sourceLineNo">277</span>    CacheConfig cc = new CacheConfig(this.conf);<a name="line.277"></a>
-<span class="sourceLineNo">278</span>    assertTrue(cc.isBlockCacheEnabled());<a name="line.278"></a>
-<span class="sourceLineNo">279</span>    assertTrue(CacheConfig.DEFAULT_IN_MEMORY == cc.isInMemory());<a name="line.279"></a>
-<span class="sourceLineNo">280</span>    basicBlockCacheOps(cc, false, true);<a name="line.280"></a>
-<span class="sourceLineNo">281</span>    assertTrue(cc.getBlockCache() instanceof LruBlockCache);<a name="line.281"></a>
-<span class="sourceLineNo">282</span>  }<a name="line.282"></a>
-<span class="sourceLineNo">283</span><a name="line.283"></a>
-<span class="sourceLineNo">284</span>  /**<a name="line.284"></a>
-<span class="sourceLineNo">285</span>   * Assert that the caches are deployed with CombinedBlockCache and of the appropriate sizes.<a name="line.285"></a>
-<span class="sourceLineNo">286</span>   */<a name="line.286"></a>
-<span class="sourceLineNo">287</span>  @Test<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  public void testOffHeapBucketCacheConfig() {<a name="line.288"></a>
-<span class="sourceLineNo">289</span>    this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");<a name="line.289"></a>
-<span class="sourceLineNo">290</span>    doBucketCacheConfigTest();<a name="line.290"></a>
-<span class="sourceLineNo">291</span>  }<a name="line.291"></a>
-<span class="sourceLineNo">292</span><a name="line.292"></a>
-<span class="sourceLineNo">293</span>  @Test<a name="line.293"></a>
-<span class="sourceLineNo">294</span>  public void testFileBucketCacheConfig() throws IOException {<a name="line.294"></a>
-<span class="sourceLineNo">295</span>    HBaseTestingUtility htu = new HBaseTestingUtility(this.conf);<a name="line.295"></a>
-<span class="sourceLineNo">296</span>    try {<a name="line.296"></a>
-<span class="sourceLineNo">297</span>      Path p = new Path(htu.getDataTestDir(), "bc.txt");<a name="line.297"></a>
-<span class="sourceLineNo">298</span>      FileSystem fs = FileSystem.get(this.conf);<a name="line.298"></a>
-<span class="sourceLineNo">299</span>      fs.create(p).close();<a name="line.299"></a>
-<span class="sourceLineNo">300</span>      this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "file:" + p);<a name="line.300"></a>
-<span class="sourceLineNo">301</span>      doBucketCacheConfigTest();<a name="line.301"></a>
-<span class="sourceLineNo">302</span>    } finally {<a name="line.302"></a>
-<span class="sourceLineNo">303</span>      htu.cleanupTestDir();<a name="line.303"></a>
-<span class="sourceLineNo">304</span>    }<a name="line.304"></a>
-<span class="sourceLineNo">305</span>  }<a name="line.305"></a>
-<span class="sourceLineNo">306</span><a name="line.306"></a>
-<span class="sourceLineNo">307</span>  private void doBucketCacheConfigTest() {<a name="line.307"></a>
-<span class="sourceLineNo">308</span>    final int bcSize = 100;<a name="line.308"></a>
-<span class="sourceLineNo">309</span>    this.conf.setInt(HConstants.BUCKET_CACHE_SIZE_KEY, bcSize);<a name="line.309"></a>
-<span class="sourceLineNo">310</span>    CacheConfig cc = new CacheConfig(this.conf);<a name="line.310"></a>
-<span class="sourceLineNo">311</span>    basicBlockCacheOps(cc, false, false);<a name="line.311"></a>
-<span class="sourceLineNo">312</span>    assertTrue(cc.getBlockCache() instanceof CombinedBlockCache);<a name="line.312"></a>
-<span class="sourceLineNo">313</span>    // TODO: Assert sizes allocated are right and proportions.<a name="line.313"></a>
-<span class="sourceLineNo">314</span>    CombinedBlockCache cbc = (CombinedBlockCache)cc.getBlockCache();<a name="line.314"></a>
-<span class="sourceLineNo">315</span>    BlockCache [] bcs = cbc.getBlockCaches();<a name="line.315"></a>
-<span class="sourceLineNo">316</span>    assertTrue(bcs[0] instanceof LruBlockCache);<a name="line.316"></a>
-<span class="sourceLineNo">317</span>    LruBlockCache lbc = (LruBlockCache)bcs[0];<a name="line.317"></a>
-<span class="sourceLineNo">318</span>    assertEquals(MemorySizeUtil.getOnHeapCacheSize(this.conf), lbc.getMaxSize());<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    assertTrue(bcs[1] instanceof BucketCache);<a name="line.319"></a>
-<span class="sourceLineNo">320</span>    BucketCache bc = (BucketCache)bcs[1];<a name="line.320"></a>
-<span class="sourceLineNo">321</span>    // getMaxSize comes back in bytes but we specified size in MB<a name="line.321"></a>
-<span class="sourceLineNo">322</span>    assertEquals(bcSize, bc.getMaxSize() / (1024 * 1024));<a name="line.322"></a>
-<span class="sourceLineNo">323</span>  }<a name="line.323"></a>
-<span class="sourceLineNo">324</span><a name="line.324"></a>
-<span class="sourceLineNo">325</span>  /**<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   * Assert that when BUCKET_CACHE_COMBINED_KEY is false, the non-default, that we deploy<a name="line.326"></a>
-<span class="sourceLineNo">327</span>   * LruBlockCache as L1 with a BucketCache for L2.<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   */<a name="line.328"></a>
-<span class="sourceLineNo">329</span>  @Test<a name="line.329"></a>
-<span class="sourceLineNo">330</span>  public void testBucketCacheConfigL1L2Setup() {<a name="line.330"></a>
-<span class="sourceLineNo">331</span>    this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");<a name="line.331"></a>
-<span class="sourceLineNo">332</span>    // Make lru size is smaller than bcSize for sure.  Need this to be true so when eviction<a name="line.332"></a>
-<span class="sourceLineNo">333</span>    // from L1 happens, it does not fail because L2 can't take the eviction because block too big.<a name="line.333"></a>
-<span class="sourceLineNo">334</span>    this.conf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.001f);<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    MemoryUsage mu = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage();<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    long lruExpectedSize = MemorySizeUtil.getOnHeapCacheSize(this.conf);<a name="line.336"></a>
-<span class="sourceLineNo">337</span>    final int bcSize = 100;<a name="line.337"></a>
-<span class="sourceLineNo">338</span>    long bcExpectedSize = 100 * 1024 * 1024; // MB.<a name="line.338"></a>
-<span class="sourceLineNo">339</span>    assertTrue(lruExpectedSize &lt; bcExpectedSize);<a name="line.339"></a>
-<span class="sourceLineNo">340</span>    this.conf.setInt(HConstants.BUCKET_CACHE_SIZE_KEY, bcSize);<a name="line.340"></a>
-<span class="sourceLineNo">341</span>    CacheConfig cc = new CacheConfig(this.conf);<a name="line.341"></a>
-<span class="sourceLineNo">342</span>    basicBlockCacheOps(cc, false, false);<a name="line.342"></a>
-<span class="sourceLineNo">343</span>    assertTrue(cc.getBlockCache() instanceof CombinedBlockCache);<a name="line.343"></a>
-<span class="sourceLineNo">344</span>    // TODO: Assert sizes allocated are right and proportions.<a name="line.344"></a>
-<span class="sourceLineNo">345</span>    CombinedBlockCache cbc = (CombinedBlockCache)cc.getBlockCache();<a name="line.345"></a>
-<span class="sourceLineNo">346</span>    LruBlockCache lbc = cbc.onHeapCache;<a name="line.346"></a>
-<span class="sourceLineNo">347</span>    assertEquals(lruExpectedSize, lbc.getMaxSize());<a name="line.347"></a>
-<span class="sourceLineNo">348</span>    BlockCache bc = cbc.l2Cache;<a name="line.348"></a>
-<span class="sourceLineNo">349</span>    // getMaxSize comes back in bytes but we specified size in MB<a name="line.349"></a>
-<span class="sourceLineNo">350</span>    assertEquals(bcExpectedSize, ((BucketCache) bc).getMaxSize());<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    // Test the L1+L2 deploy works as we'd expect with blocks evicted from L1 going to L2.<a name="line.351"></a>
-<span class="sourceLineNo">352</span>    long initialL1BlockCount = lbc.getBlockCount();<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    long initialL2BlockCount = bc.getBlockCount();<a name="line.353"></a>
-<span class="sourceLineNo">354</span>    Cacheable c = new DataCacheEntry();<a name="line.354"></a>
-<span class="sourceLineNo">355</span>    BlockCacheKey bck = new BlockCacheKey("bck", 0);<a name="line.355"></a>
-<span class="sourceLineNo">356</span>    lbc.cacheBlock(bck, c, false);<a name="line.356"></a>
-<span class="sourceLineNo">357</span>    assertEquals(initialL1BlockCount + 1, lbc.getBlockCount());<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    assertEquals(initialL2BlockCount, bc.getBlockCount());<a name="line.358"></a>
-<span class="sourceLineNo">359</span>    // Force evictions by putting in a block too big.<a name="line.359"></a>
-<span class="sourceLineNo">360</span>    final long justTooBigSize = lbc.acceptableSize() + 1;<a name="line.360"></a>
-<span class="sourceLineNo">361</span>    lbc.cacheBlock(new BlockCacheKey("bck2", 0), new DataCacheEntry() {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>      @Override<a name="line.362"></a>
-<span class="sourceLineNo">363</span>      public long heapSize() {<a name="line.363"></a>
-<span class="sourceLineNo">364</span>        return justTooBigSize;<a name="line.364"></a>
-<span class="sourceLineNo">365</span>      }<a name="line.365"></a>
-<span class="sourceLineNo">366</span><a name="line.366"></a>
-<span class="sourceLineNo">367</span>      @Override<a name="line.367"></a>
-<span class="sourceLineNo">368</span>      public int getSerializedLength() {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>        return (int)heapSize();<a name="line.369"></a>
-<span class="sourceLineNo">370</span>      }<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    });<a name="line.371"></a>
-<span class="sourceLineNo">372</span>    // The eviction thread in lrublockcache needs to run.<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    while (initialL1BlockCount != lbc.getBlockCount()) Threads.sleep(10);<a name="line.373"></a>
-<span class="sourceLineNo">374</span>    assertEquals(initialL1BlockCount, lbc.getBlockCount());<a name="line.374"></a>
-<span class="sourceLineNo">375</span>  }<a name="line.375"></a>
-<span class="sourceLineNo">376</span><a name="line.376"></a>
-<span class="sourceLineNo">377</span>  @Test<a name="line.377"></a>
-<span class="sourceLineNo">378</span>  public void testL2CacheWithInvalidBucketSize() {<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    Configuration c = new Configuration(this.conf);<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    c.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    c.set(CacheConfig.BUCKET_CACHE_BUCKETS_KEY, "256,512,1024,2048,4000,4096");<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    c.setFloat(HConstants.BUCKET_CACHE_SIZE_KEY, 1024);<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    try {<a name="line.383"></a>
-<span class="sourceLineNo">384</span>      CacheConfig.getBucketCache(c);<a name="line.384"></a>
-<span class="sourceLineNo">385</span>      fail("Should throw IllegalArgumentException when passing illegal value for bucket size");<a name="line.385"></a>
-<span class="sourceLineNo">386</span>    } catch (IllegalArgumentException e) {<a name="line.386"></a>
-<span class="sourceLineNo">387</span>    }<a name="line.387"></a>
-<span class="sourceLineNo">388</span>  }<a name="line.388"></a>
-<span class="sourceLineNo">389</span>}<a name="line.389"></a>
+<span class="sourceLineNo">212</span>    CacheConfig.instantiateBlockCache(conf);<a name="line.212"></a>
+<span class="sourceLineNo">213</span>    CacheConfig cacheConfig = new CacheConfig(conf);<a name="line.213"></a>
+<span class="sourceLineNo">214</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.214"></a>
+<span class="sourceLineNo">215</span>    assertFalse(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.215"></a>
+<span class="sourceLineNo">216</span>    assertFalse(cacheConfig.shouldCacheDataCompressed());<a name="line.216"></a>
+<span class="sourceLineNo">217</span>    assertFalse(cacheConfig.shouldCacheDataOnWrite());<a name="line.217"></a>
+<span class="sourceLineNo">218</span>    assertTrue(cacheConfig.shouldCacheDataOnRead());<a name="line.218"></a>
+<span class="sourceLineNo">219</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.219"></a>
+<span class="sourceLineNo">220</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.220"></a>
+<span class="sourceLineNo">221</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.221"></a>
+<span class="sourceLineNo">222</span>    assertFalse(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.222"></a>
+<span class="sourceLineNo">223</span>    assertFalse(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.223"></a>
+<span class="sourceLineNo">224</span><a name="line.224"></a>
+<span class="sourceLineNo">225</span>    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, true);<a name="line.225"></a>
+<span class="sourceLineNo">226</span>    conf.setBoolean(CacheConfig.CACHE_DATA_BLOCKS_COMPRESSED_KEY, true);<a name="line.226"></a>
+<span class="sourceLineNo">227</span>    conf.setBoolean(CacheConfig.CACHE_BLOOM_BLOCKS_ON_WRITE_KEY, true);<a name="line.227"></a>
+<span class="sourceLineNo">228</span>    conf.setBoolean(CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY, true);<a name="line.228"></a>
+<span class="sourceLineNo">229</span><a name="line.229"></a>
+<span class="sourceLineNo">230</span>    cacheConfig = new CacheConfig(conf);<a name="line.230"></a>
+<span class="sourceLineNo">231</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.231"></a>
+<span class="sourceLineNo">232</span>    assertTrue(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.232"></a>
+<span class="sourceLineNo">233</span>    assertTrue(cacheConfig.shouldCacheDataCompressed());<a name="line.233"></a>
+<span class="sourceLineNo">234</span>    assertTrue(cacheConfig.shouldCacheDataOnWrite());<a name="line.234"></a>
+<span class="sourceLineNo">235</span>    assertTrue(cacheConfig.shouldCacheDataOnRead());<a name="line.235"></a>
+<span class="sourceLineNo">236</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.236"></a>
+<span class="sourceLineNo">237</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.237"></a>
+<span class="sourceLineNo">238</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.238"></a>
+<span class="sourceLineNo">239</span>    assertTrue(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.239"></a>
+<span class="sourceLineNo">240</span>    assertTrue(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.240"></a>
+<span class="sourceLineNo">241</span><a name="line.241"></a>
+<span class="sourceLineNo">242</span>    conf.setBoolean(CacheConfig.CACHE_DATA_ON_READ_KEY, false);<a name="line.242"></a>
+<span class="sourceLineNo">243</span>    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, false);<a name="line.243"></a>
+<span class="sourceLineNo">244</span><a name="line.244"></a>
+<span class="sourceLineNo">245</span>    cacheConfig = new CacheConfig(conf);<a name="line.245"></a>
+<span class="sourceLineNo">246</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.246"></a>
+<span class="sourceLineNo">247</span>    assertFalse(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.247"></a>
+<span class="sourceLineNo">248</span>    assertFalse(cacheConfig.shouldCacheDataCompressed());<a name="line.248"></a>
+<span class="sourceLineNo">249</span>    assertFalse(cacheConfig.shouldCacheDataOnWrite());<a name="line.249"></a>
+<span class="sourceLineNo">250</span>    assertFalse(cacheConfig.shouldCacheDataOnRead());<a name="line.250"></a>
+<span class="sourceLineNo">251</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.251"></a>
+<span class="sourceLineNo">252</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.252"></a>
+<span class="sourceLineNo">253</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.253"></a>
+<span class="sourceLineNo">254</span>    assertTrue(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.254"></a>
+<span class="sourceLineNo">255</span>    assertTrue(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.255"></a>
+<span class="sourceLineNo">256</span><a name="line.256"></a>
+<span class="sourceLineNo">257</span>    conf.setBoolean(CacheConfig.CACHE_DATA_ON_READ_KEY, true);<a name="line.257"></a>
+<span class="sourceLineNo">258</span>    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, false);<a name="line.258"></a>
+<span class="sourceLineNo">259</span><a name="line.259"></a>
+<span class="sourceLineNo">260</span>    HColumnDescriptor family = new HColumnDescriptor("testDisableCacheDataBlock");<a name="line.260"></a>
+<span class="sourceLineNo">261</span>    family.setBlockCacheEnabled(false);<a name="line.261"></a>
+<span class="sourceLineNo">262</span><a name="line.262"></a>
+<span class="sourceLineNo">263</span>    cacheConfig = new CacheConfig(conf, family);<a name="line.263"></a>
+<span class="sourceLineNo">264</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.264"></a>
+<span class="sourceLineNo">265</span>    assertFalse(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.265"></a>
+<span class="sourceLineNo">266</span>    assertFalse(cacheConfig.shouldCacheDataCompressed());<a name="line.266"></a>
+<span class="sourceLineNo">267</span>    assertFalse(cacheConfig.shouldCacheDataOnWrite());<a name="line.267"></a>
+<span class="sourceLineNo">268</span>    assertFalse(cacheConfig.shouldCacheDataOnRead());<a name="line.268"></a>
+<span class="sourceLineNo">269</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.269"></a>
+<span class="sourceLineNo">270</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.270"></a>
+<span class="sourceLineNo">271</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.271"></a>
+<span class="sourceLineNo">272</span>    assertTrue(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.272"></a>
+<span class="sourceLineNo">273</span>    assertTrue(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.273"></a>
+<span class="sourceLineNo">274</span>  }<a name="line.274"></a>
+<span class="sourceLineNo">275</span><a name="line.275"></a>
+<span class="sourceLineNo">276</span>  @Test<a name="line.276"></a>
+<span class="sourceLineNo">277</span>  public void testCacheConfigDefaultLRUBlockCache() {<a name="line.277"></a>
+<span class="sourceLineNo">278</span>    CacheConfig.instantiateBlockCache(this.conf);<a name="line.278"></a>
+<span class="sourceLineNo">279</span>    CacheConfig cc = new CacheConfig(this.conf);<a name="line.279"></a>
+<span class="sourceLineNo">280</span>    assertTrue(cc.isBlockCacheEnabled());<a name="line.280"></a>
+<span class="sourceLineNo">281</span>    assertTrue(CacheConfig.DEFAULT_IN_MEMORY == cc.isInMemory());<a name="line.281"></a>
+<span class="sourceLineNo">282</span>    basicBlockCacheOps(cc, false, true);<a name="line.282"></a>
+<span class="sourceLineNo">283</span>    assertTrue(cc.getBlockCache() instanceof LruBlockCache);<a name="line.283"></a>
+<span class="sourceLineNo">284</span>  }<a name="line.284"></a>
+<span class="sourceLineNo">285</span><a name="line.285"></a>
+<span class="sourceLineNo">286</span>  /**<a name="line.286"></a>
+<span class="sourceLineNo">287</span>   * Assert that the caches are deployed with CombinedBlockCache and of the appropriate sizes.<a name="line.287"></a>
+<span class="sourceLineNo">288</span>   */<a name="line.288"></a>
+<span class="sourceLineNo">289</span>  @Test<a name="line.289"></a>
+<span class="sourceLineNo">290</span>  public void testOffHeapBucketCacheConfig() {<a name="line.290"></a>
+<span class="sourceLineNo">291</span>    this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");<a name="line.291"></a>
+<span class="sourceLineNo">292</span>    doBucketCacheConfigTest();<a name="line.292"></a>
+<span class="sourceLineNo">293</span>  }<a name="line.293"></a>
+<span class="sourceLineNo">294</span><a name="line.294"></a>
+<span class="sourceLineNo">295</span>  @Test<a name="line.295"></a>
+<span class="sourceLineNo">296</span>  public void testFileBucketCacheConfig() throws IOException {<a name="line.296"></a>
+<span class="sourceLineNo">297</span>    HBaseTestingUtility htu = new HBaseTestingUtility(this.conf);<a name="line.297"></a>
+<span class="sourceLineNo">298</span>    try {<a name="line.298"></a>
+<span class="sourceLineNo">299</span>      Path p = new Path(htu.getDataTestDir(), "bc.txt");<a name="line.299"></a>
+<span class="sourceLineNo">300</span>      FileSystem fs = FileSystem.get(this.conf);<a name="line.300"></a>
+<span class="sourceLineNo">301</span>      fs.create(p).close();<a name="line.301"></a>
+<span class="sourceLineNo">302</span>      this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "file:" + p);<a name="line.302"></a>
+<span class="sourceLineNo">303</span>      doBucketCacheConfigTest();<a name="line.303"></a>
+<span class="sourceLineNo">304</span>    } finally {<a name="line.304"></a>
+<span class="sourceLineNo">305</span>      htu.cleanupTestDir();<a name="line.305"></a>
+<span class="sourceLineNo">306</span>    }<a name="line.306"></a>
+<span class="sourceLineNo">307</span>  }<a name="line.307"></a>
+<span class="sourceLineNo">308</span><a name="line.308"></a>
+<span class="sourceLineNo">309</span>  private void doBucketCacheConfigTest() {<a name="line.309"></a>
+<span class="sourceLineNo">310</span>    final int bcSize = 100;<a name="line.310"></a>
+<span class="sourceLineNo">311</span>    this.conf.setInt(HConstants.BUCKET_CACHE_SIZE_KEY, bcSize);<a name="line.311"></a>
+<span class="sourceLineNo">312</span>    CacheConfig.instantiateBlockCache(this.conf);<a name="line.312"></a>
+<span class="sourceLineNo">313</span>    CacheConfig cc = new CacheConfig(this.conf);<a name="line.313"></a>
+<span class="sourceLineNo">314</span>    basicBlockCacheOps(cc, false, false);<a name="line.314"></a>
+<span class="sourceLineNo">315</span>    assertTrue(cc.getBlockCache() instanceof CombinedBlockCache);<a name="line.315"></a>
+<span class="sourceLineNo">316</span>    // TODO: Assert sizes allocated are right and proportions.<a name="line.316"></a>
+<span class="sourceLineNo">317</span>    CombinedBlockCache cbc = (CombinedBlockCache)cc.getBlockCache();<a name="line.317"></a>
+<span class="sourceLineNo">318</span>    BlockCache [] bcs = cbc.getBlockCaches();<a name="line.318"></a>
+<span class="sourceLineNo">319</span>    assertTrue(bcs[0] instanceof LruBlockCache);<a name="line.319"></a>
+<span class="sourceLineNo">320</span>    LruBlockCache lbc = (LruBlockCache)bcs[0];<a name="line.320"></a>
+<span class="sourceLineNo">321</span>    assertEquals(MemorySizeUtil.getOnHeapCacheSize(this.conf), lbc.getMaxSize());<a name="line.321"></a>
+<span class="sourceLineNo">322</span>    assertTrue(bcs[1] instanceof BucketCache);<a name="line.322"></a>
+<span class="sourceLineNo">323</span>    BucketCache bc = (BucketCache)bcs[1];<a name="line.323"></a>
+<span class="sourceLineNo">324</span>    // getMaxSize comes back in bytes but we specified size in MB<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    assertEquals(bcSize, bc.getMaxSize() / (1024 * 1024));<a name="line.325"></a>
+<span class="sourceLineNo">326</span>  }<a name="line.326"></a>
+<span class="sourceLineNo">327</span><a name="line.327"></a>
+<span class="sourceLineNo">328</span>  /**<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * Assert that when BUCKET_CACHE_COMBINED_KEY is false, the non-default, that we deploy<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   * LruBlockCache as L1 with a BucketCache for L2.<a name="line.330"></a>
+<span class="sourceLineNo">331</span>   */<a name="line.331"></a>
+<span class="sourceLineNo">332</span>  @Test<a name="line.332"></a>
+<span class="sourceLineNo">333</span>  public void testBucketCacheConfigL1L2Setup() {<a name="line.333"></a>
+<span class="sourceLineNo">334</span>    this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");<a name="line.334"></a>
+<span class="sourceLineNo">335</span>    // Make lru size is smaller than bcSize for sure.  Need this to be true so when eviction<a name="line.335"></a>
+<span class="sourceLineNo">336</span>    // from L1 happens, it does not fail because L2 can't take the eviction because block too big.<a name="line.336"></a>
+<span class="sourceLineNo">337</span>    this.conf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.001f);<a name="line.337"></a>
+<span class="sourceLineNo">338</span>    MemoryUsage mu = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage();<a name="line.338"></a>
+<span class="sourceLineNo">339</span>    long lruExpectedSize = MemorySizeUtil.getOnHeapCacheSize(this.conf);<a name="line.339"></a>
+<span class="sourceLineNo">340</span>    final int bcSize = 100;<a name="line.340"></a>
+<span class="sourceLineNo">341</span>    long bcExpectedSize = 100 * 1024 * 1024; // MB.<a name="line.341"></a>
+<span class="sourceLineNo">342</span>    assertTrue(lruExpectedSize &lt; bcExpectedSize);<a name="line.342"></a>
+<span class="sourceLineNo">343</span>    this.conf.setInt(HConstants.BUCKET_CACHE_SIZE_KEY, bcSize);<a name="line.343"></a>
+<span class="sourceLineNo">344</span>    CacheConfig.instantiateBlockCache(this.conf);<a name="line.344"></a>
+<span class="sourceLineNo">345</span>    CacheConfig cc = new CacheConfig(this.conf);<a name="line.345"></a>
+<span class="sourceLineNo">346</span>    basicBlockCacheOps(cc, false, false);<a name="line.346"></a>
+<span class="sourceLineNo">347</span>    assertTrue(cc.getBlockCache() instanceof CombinedBlockCache);<a name="line.347"></a>
+<span class="sourceLineNo">348</span>    // TODO: Assert sizes allocated are right and proportions.<a name="line.348"></a>
+<span class="sourceLineNo">349</span>    CombinedBlockCache cbc = (CombinedBlockCache)cc.getBlockCache();<a name="line.349"></a>
+<span class="sourceLineNo">350</span>    LruBlockCache lbc = cbc.onHeapCache;<a name="line.350"></a>
+<span class="sourceLineNo">351</span>    assertEquals(lruExpectedSize, lbc.getMaxSize());<a name="line.351"></a>
+<span class="sourceLineNo">352</span>    BlockCache bc = cbc.l2Cache;<a name="line.352"></a>
+<span class="sourceLineNo">353</span>    // getMaxSize comes back in bytes but we specified size in MB<a name="line.353"></a>
+<span class="sourceLineNo">354</span>    assertEquals(bcExpectedSize, ((BucketCache) bc).getMaxSize());<a name="line.354"></a>
+<span class="sourceLineNo">355</span>    // Test the L1+L2 deploy works as we'd expect with blocks evicted from L1 going to L2.<a name="line.355"></a>
+<span class="sourceLineNo">356</span>    long initialL1BlockCount = lbc.getBlockCount();<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    long initialL2BlockCount = bc.getBlockCount();<a name="line.357"></a>
+<span class="sourceLineNo">358</span>    Cacheable c = new DataCacheEntry();<a name="line.358"></a>
+<span class="sourceLineNo">359</span>    BlockCacheKey bck = new BlockCacheKey("bck", 0);<a name="line.359"></a>
+<span class="sourceLineNo">360</span>    lbc.cacheBlock(bck, c, false);<a name="line.360"></a>
+<span class="sourceLineNo">361</span>    assertEquals(initialL1BlockCount + 1, lbc.getBlockCount());<a name="line.361"></a>
+<span class="sourceLineNo">362</span>    assertEquals(initialL2BlockCount, bc.getBlockCount());<a name="line.362"></a>
+<span class="sourceLineNo">363</span>    // Force evictions by putting in a block too big.<a name="line.363"></a>
+<span class="sourceLineNo">364</span>    final long justTooBigSize = lbc.acceptableSize() + 1;<a name="line.364"></a>
+<span class="sourceLineNo">365</span>    lbc.cacheBlock(new BlockCacheKey("bck2", 0), new DataCacheEntry() {<a name="line.365"></a>
+<span class="sourceLineNo">366</span>      @Override<a name="line.366"></a>
+<span class="sourceLineNo">367</span>      public long heapSize() {<a name="line.367"></a>
+<span class="sourceLineNo">368</span>        return justTooBigSize;<a name="line.368"></a>
+<span class="sourceLineNo">369</span>      }<a name="line.369"></a>
+<span class="sourceLineNo">370</span><a name="line.370"></a>
+<span class="sourceLineNo">371</span>      @Override<a name="line.371"></a>
+<span class="sourceLineNo">372</span>      public int getSerializedLength() {<a name="line.372"></a>
+<span class="sourceLineNo">373</span>        return (int)heapSize();<a name="line.373"></a>
+<span class="sourceLineNo">374</span>      }<a name="line.374"></a>
+<span class="sourceLineNo">375</span>    });<a name="line.375"></a>
+<span class="sourceLineNo">376</span>    // The eviction thread in lrublockcache needs to run.<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    while (initialL1BlockCount != lbc.getBlockCount()) Threads.sleep(10);<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    assertEquals(initialL1BlockCount, lbc.getBlockCount());<a name="line.378"></a>
+<span class="sourceLineNo">379</span>  }<a name="line.379"></a>
+<span class="sourceLineNo">380</span><a name="line.380"></a>
+<span class="sourceLineNo">381</span>  @Test<a name="line.381"></a>
+<span class="sourceLineNo">382</span>  public void testL2CacheWithInvalidBucketSize() {<a name="line.382"></a>
+<span class="sourceLineNo">383</span>    Configuration c = new Configuration(this.conf);<a name="line.383"></a>
+<span class="sourceLineNo">384</span>    c.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");<a name="line.384"></a>
+<span class="sourceLineNo">385</span>    c.set(CacheConfig.BUCKET_CACHE_BUCKETS_KEY, "256,512,1024,2048,4000,4096");<a name="line.385"></a>
+<span class="sourceLineNo">386</span>    c.setFloat(HConstants.BUCKET_CACHE_SIZE_KEY, 1024);<a name="line.386"></a>
+<span class="sourceLineNo">387</span>    try {<a name="line.387"></a>
+<span class="sourceLineNo">388</span>      CacheConfig.getBucketCache(c);<a name="line.388"></a>
+<span class="sourceLineNo">389</span>      fail("Should throw IllegalArgumentException when passing illegal value for bucket size");<a name="line.389"></a>
+<span class="sourceLineNo">390</span>    } catch (IllegalArgumentException e) {<a name="line.390"></a>
+<span class="sourceLineNo">391</span>    }<a name="line.391"></a>
+<span class="sourceLineNo">392</span>  }<a name="line.392"></a>
+<span class="sourceLineNo">393</span>}<a name="line.393"></a>
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.MetaCacheEntry.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.MetaCacheEntry.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.MetaCacheEntry.html
index f8e34f2..ea22449 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.MetaCacheEntry.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.MetaCacheEntry.html
@@ -217,184 +217,188 @@
 <span class="sourceLineNo">209</span>  @Test<a name="line.209"></a>
 <span class="sourceLineNo">210</span>  public void testDisableCacheDataBlock() throws IOException {<a name="line.210"></a>
 <span class="sourceLineNo">211</span>    Configuration conf = HBaseConfiguration.create();<a name="line.211"></a>
-<span class="sourceLineNo">212</span>    CacheConfig cacheConfig = new CacheConfig(conf);<a name="line.212"></a>
-<span class="sourceLineNo">213</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.213"></a>
-<span class="sourceLineNo">214</span>    assertFalse(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.214"></a>
-<span class="sourceLineNo">215</span>    assertFalse(cacheConfig.shouldCacheDataCompressed());<a name="line.215"></a>
-<span class="sourceLineNo">216</span>    assertFalse(cacheConfig.shouldCacheDataOnWrite());<a name="line.216"></a>
-<span class="sourceLineNo">217</span>    assertTrue(cacheConfig.shouldCacheDataOnRead());<a name="line.217"></a>
-<span class="sourceLineNo">218</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.218"></a>
-<span class="sourceLineNo">219</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.219"></a>
-<span class="sourceLineNo">220</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.220"></a>
-<span class="sourceLineNo">221</span>    assertFalse(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.221"></a>
-<span class="sourceLineNo">222</span>    assertFalse(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.222"></a>
-<span class="sourceLineNo">223</span><a name="line.223"></a>
-<span class="sourceLineNo">224</span>    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, true);<a name="line.224"></a>
-<span class="sourceLineNo">225</span>    conf.setBoolean(CacheConfig.CACHE_DATA_BLOCKS_COMPRESSED_KEY, true);<a name="line.225"></a>
-<span class="sourceLineNo">226</span>    conf.setBoolean(CacheConfig.CACHE_BLOOM_BLOCKS_ON_WRITE_KEY, true);<a name="line.226"></a>
-<span class="sourceLineNo">227</span>    conf.setBoolean(CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY, true);<a name="line.227"></a>
-<span class="sourceLineNo">228</span><a name="line.228"></a>
-<span class="sourceLineNo">229</span>    cacheConfig = new CacheConfig(conf);<a name="line.229"></a>
-<span class="sourceLineNo">230</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.230"></a>
-<span class="sourceLineNo">231</span>    assertTrue(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.231"></a>
-<span class="sourceLineNo">232</span>    assertTrue(cacheConfig.shouldCacheDataCompressed());<a name="line.232"></a>
-<span class="sourceLineNo">233</span>    assertTrue(cacheConfig.shouldCacheDataOnWrite());<a name="line.233"></a>
-<span class="sourceLineNo">234</span>    assertTrue(cacheConfig.shouldCacheDataOnRead());<a name="line.234"></a>
-<span class="sourceLineNo">235</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.235"></a>
-<span class="sourceLineNo">236</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.236"></a>
-<span class="sourceLineNo">237</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.237"></a>
-<span class="sourceLineNo">238</span>    assertTrue(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.238"></a>
-<span class="sourceLineNo">239</span>    assertTrue(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.239"></a>
-<span class="sourceLineNo">240</span><a name="line.240"></a>
-<span class="sourceLineNo">241</span>    conf.setBoolean(CacheConfig.CACHE_DATA_ON_READ_KEY, false);<a name="line.241"></a>
-<span class="sourceLineNo">242</span>    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, false);<a name="line.242"></a>
-<span class="sourceLineNo">243</span><a name="line.243"></a>
-<span class="sourceLineNo">244</span>    cacheConfig = new CacheConfig(conf);<a name="line.244"></a>
-<span class="sourceLineNo">245</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.245"></a>
-<span class="sourceLineNo">246</span>    assertFalse(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.246"></a>
-<span class="sourceLineNo">247</span>    assertFalse(cacheConfig.shouldCacheDataCompressed());<a name="line.247"></a>
-<span class="sourceLineNo">248</span>    assertFalse(cacheConfig.shouldCacheDataOnWrite());<a name="line.248"></a>
-<span class="sourceLineNo">249</span>    assertFalse(cacheConfig.shouldCacheDataOnRead());<a name="line.249"></a>
-<span class="sourceLineNo">250</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.250"></a>
-<span class="sourceLineNo">251</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.251"></a>
-<span class="sourceLineNo">252</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.252"></a>
-<span class="sourceLineNo">253</span>    assertTrue(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.253"></a>
-<span class="sourceLineNo">254</span>    assertTrue(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.254"></a>
-<span class="sourceLineNo">255</span><a name="line.255"></a>
-<span class="sourceLineNo">256</span>    conf.setBoolean(CacheConfig.CACHE_DATA_ON_READ_KEY, true);<a name="line.256"></a>
-<span class="sourceLineNo">257</span>    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, false);<a name="line.257"></a>
-<span class="sourceLineNo">258</span><a name="line.258"></a>
-<span class="sourceLineNo">259</span>    HColumnDescriptor family = new HColumnDescriptor("testDisableCacheDataBlock");<a name="line.259"></a>
-<span class="sourceLineNo">260</span>    family.setBlockCacheEnabled(false);<a name="line.260"></a>
-<span class="sourceLineNo">261</span><a name="line.261"></a>
-<span class="sourceLineNo">262</span>    cacheConfig = new CacheConfig(conf, family);<a name="line.262"></a>
-<span class="sourceLineNo">263</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    assertFalse(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.264"></a>
-<span class="sourceLineNo">265</span>    assertFalse(cacheConfig.shouldCacheDataCompressed());<a name="line.265"></a>
-<span class="sourceLineNo">266</span>    assertFalse(cacheConfig.shouldCacheDataOnWrite());<a name="line.266"></a>
-<span class="sourceLineNo">267</span>    assertFalse(cacheConfig.shouldCacheDataOnRead());<a name="line.267"></a>
-<span class="sourceLineNo">268</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.269"></a>
-<span class="sourceLineNo">270</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.270"></a>
-<span class="sourceLineNo">271</span>    assertTrue(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.271"></a>
-<span class="sourceLineNo">272</span>    assertTrue(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.272"></a>
-<span class="sourceLineNo">273</span>  }<a name="line.273"></a>
-<span class="sourceLineNo">274</span><a name="line.274"></a>
-<span class="sourceLineNo">275</span>  @Test<a name="line.275"></a>
-<span class="sourceLineNo">276</span>  public void testCacheConfigDefaultLRUBlockCache() {<a name="line.276"></a>
-<span class="sourceLineNo">277</span>    CacheConfig cc = new CacheConfig(this.conf);<a name="line.277"></a>
-<span class="sourceLineNo">278</span>    assertTrue(cc.isBlockCacheEnabled());<a name="line.278"></a>
-<span class="sourceLineNo">279</span>    assertTrue(CacheConfig.DEFAULT_IN_MEMORY == cc.isInMemory());<a name="line.279"></a>
-<span class="sourceLineNo">280</span>    basicBlockCacheOps(cc, false, true);<a name="line.280"></a>
-<span class="sourceLineNo">281</span>    assertTrue(cc.getBlockCache() instanceof LruBlockCache);<a name="line.281"></a>
-<span class="sourceLineNo">282</span>  }<a name="line.282"></a>
-<span class="sourceLineNo">283</span><a name="line.283"></a>
-<span class="sourceLineNo">284</span>  /**<a name="line.284"></a>
-<span class="sourceLineNo">285</span>   * Assert that the caches are deployed with CombinedBlockCache and of the appropriate sizes.<a name="line.285"></a>
-<span class="sourceLineNo">286</span>   */<a name="line.286"></a>
-<span class="sourceLineNo">287</span>  @Test<a name="line.287"></a>
-<span class="sourceLineNo">288</span>  public void testOffHeapBucketCacheConfig() {<a name="line.288"></a>
-<span class="sourceLineNo">289</span>    this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");<a name="line.289"></a>
-<span class="sourceLineNo">290</span>    doBucketCacheConfigTest();<a name="line.290"></a>
-<span class="sourceLineNo">291</span>  }<a name="line.291"></a>
-<span class="sourceLineNo">292</span><a name="line.292"></a>
-<span class="sourceLineNo">293</span>  @Test<a name="line.293"></a>
-<span class="sourceLineNo">294</span>  public void testFileBucketCacheConfig() throws IOException {<a name="line.294"></a>
-<span class="sourceLineNo">295</span>    HBaseTestingUtility htu = new HBaseTestingUtility(this.conf);<a name="line.295"></a>
-<span class="sourceLineNo">296</span>    try {<a name="line.296"></a>
-<span class="sourceLineNo">297</span>      Path p = new Path(htu.getDataTestDir(), "bc.txt");<a name="line.297"></a>
-<span class="sourceLineNo">298</span>      FileSystem fs = FileSystem.get(this.conf);<a name="line.298"></a>
-<span class="sourceLineNo">299</span>      fs.create(p).close();<a name="line.299"></a>
-<span class="sourceLineNo">300</span>      this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "file:" + p);<a name="line.300"></a>
-<span class="sourceLineNo">301</span>      doBucketCacheConfigTest();<a name="line.301"></a>
-<span class="sourceLineNo">302</span>    } finally {<a name="line.302"></a>
-<span class="sourceLineNo">303</span>      htu.cleanupTestDir();<a name="line.303"></a>
-<span class="sourceLineNo">304</span>    }<a name="line.304"></a>
-<span class="sourceLineNo">305</span>  }<a name="line.305"></a>
-<span class="sourceLineNo">306</span><a name="line.306"></a>
-<span class="sourceLineNo">307</span>  private void doBucketCacheConfigTest() {<a name="line.307"></a>
-<span class="sourceLineNo">308</span>    final int bcSize = 100;<a name="line.308"></a>
-<span class="sourceLineNo">309</span>    this.conf.setInt(HConstants.BUCKET_CACHE_SIZE_KEY, bcSize);<a name="line.309"></a>
-<span class="sourceLineNo">310</span>    CacheConfig cc = new CacheConfig(this.conf);<a name="line.310"></a>
-<span class="sourceLineNo">311</span>    basicBlockCacheOps(cc, false, false);<a name="line.311"></a>
-<span class="sourceLineNo">312</span>    assertTrue(cc.getBlockCache() instanceof CombinedBlockCache);<a name="line.312"></a>
-<span class="sourceLineNo">313</span>    // TODO: Assert sizes allocated are right and proportions.<a name="line.313"></a>
-<span class="sourceLineNo">314</span>    CombinedBlockCache cbc = (CombinedBlockCache)cc.getBlockCache();<a name="line.314"></a>
-<span class="sourceLineNo">315</span>    BlockCache [] bcs = cbc.getBlockCaches();<a name="line.315"></a>
-<span class="sourceLineNo">316</span>    assertTrue(bcs[0] instanceof LruBlockCache);<a name="line.316"></a>
-<span class="sourceLineNo">317</span>    LruBlockCache lbc = (LruBlockCache)bcs[0];<a name="line.317"></a>
-<span class="sourceLineNo">318</span>    assertEquals(MemorySizeUtil.getOnHeapCacheSize(this.conf), lbc.getMaxSize());<a name="line.318"></a>
-<span class="sourceLineNo">319</span>    assertTrue(bcs[1] instanceof BucketCache);<a name="line.319"></a>
-<span class="sourceLineNo">320</span>    BucketCache bc = (BucketCache)bcs[1];<a name="line.320"></a>
-<span class="sourceLineNo">321</span>    // getMaxSize comes back in bytes but we specified size in MB<a name="line.321"></a>
-<span class="sourceLineNo">322</span>    assertEquals(bcSize, bc.getMaxSize() / (1024 * 1024));<a name="line.322"></a>
-<span class="sourceLineNo">323</span>  }<a name="line.323"></a>
-<span class="sourceLineNo">324</span><a name="line.324"></a>
-<span class="sourceLineNo">325</span>  /**<a name="line.325"></a>
-<span class="sourceLineNo">326</span>   * Assert that when BUCKET_CACHE_COMBINED_KEY is false, the non-default, that we deploy<a name="line.326"></a>
-<span class="sourceLineNo">327</span>   * LruBlockCache as L1 with a BucketCache for L2.<a name="line.327"></a>
-<span class="sourceLineNo">328</span>   */<a name="line.328"></a>
-<span class="sourceLineNo">329</span>  @Test<a name="line.329"></a>
-<span class="sourceLineNo">330</span>  public void testBucketCacheConfigL1L2Setup() {<a name="line.330"></a>
-<span class="sourceLineNo">331</span>    this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");<a name="line.331"></a>
-<span class="sourceLineNo">332</span>    // Make lru size is smaller than bcSize for sure.  Need this to be true so when eviction<a name="line.332"></a>
-<span class="sourceLineNo">333</span>    // from L1 happens, it does not fail because L2 can't take the eviction because block too big.<a name="line.333"></a>
-<span class="sourceLineNo">334</span>    this.conf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.001f);<a name="line.334"></a>
-<span class="sourceLineNo">335</span>    MemoryUsage mu = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage();<a name="line.335"></a>
-<span class="sourceLineNo">336</span>    long lruExpectedSize = MemorySizeUtil.getOnHeapCacheSize(this.conf);<a name="line.336"></a>
-<span class="sourceLineNo">337</span>    final int bcSize = 100;<a name="line.337"></a>
-<span class="sourceLineNo">338</span>    long bcExpectedSize = 100 * 1024 * 1024; // MB.<a name="line.338"></a>
-<span class="sourceLineNo">339</span>    assertTrue(lruExpectedSize &lt; bcExpectedSize);<a name="line.339"></a>
-<span class="sourceLineNo">340</span>    this.conf.setInt(HConstants.BUCKET_CACHE_SIZE_KEY, bcSize);<a name="line.340"></a>
-<span class="sourceLineNo">341</span>    CacheConfig cc = new CacheConfig(this.conf);<a name="line.341"></a>
-<span class="sourceLineNo">342</span>    basicBlockCacheOps(cc, false, false);<a name="line.342"></a>
-<span class="sourceLineNo">343</span>    assertTrue(cc.getBlockCache() instanceof CombinedBlockCache);<a name="line.343"></a>
-<span class="sourceLineNo">344</span>    // TODO: Assert sizes allocated are right and proportions.<a name="line.344"></a>
-<span class="sourceLineNo">345</span>    CombinedBlockCache cbc = (CombinedBlockCache)cc.getBlockCache();<a name="line.345"></a>
-<span class="sourceLineNo">346</span>    LruBlockCache lbc = cbc.onHeapCache;<a name="line.346"></a>
-<span class="sourceLineNo">347</span>    assertEquals(lruExpectedSize, lbc.getMaxSize());<a name="line.347"></a>
-<span class="sourceLineNo">348</span>    BlockCache bc = cbc.l2Cache;<a name="line.348"></a>
-<span class="sourceLineNo">349</span>    // getMaxSize comes back in bytes but we specified size in MB<a name="line.349"></a>
-<span class="sourceLineNo">350</span>    assertEquals(bcExpectedSize, ((BucketCache) bc).getMaxSize());<a name="line.350"></a>
-<span class="sourceLineNo">351</span>    // Test the L1+L2 deploy works as we'd expect with blocks evicted from L1 going to L2.<a name="line.351"></a>
-<span class="sourceLineNo">352</span>    long initialL1BlockCount = lbc.getBlockCount();<a name="line.352"></a>
-<span class="sourceLineNo">353</span>    long initialL2BlockCount = bc.getBlockCount();<a name="line.353"></a>
-<span class="sourceLineNo">354</span>    Cacheable c = new DataCacheEntry();<a name="line.354"></a>
-<span class="sourceLineNo">355</span>    BlockCacheKey bck = new BlockCacheKey("bck", 0);<a name="line.355"></a>
-<span class="sourceLineNo">356</span>    lbc.cacheBlock(bck, c, false);<a name="line.356"></a>
-<span class="sourceLineNo">357</span>    assertEquals(initialL1BlockCount + 1, lbc.getBlockCount());<a name="line.357"></a>
-<span class="sourceLineNo">358</span>    assertEquals(initialL2BlockCount, bc.getBlockCount());<a name="line.358"></a>
-<span class="sourceLineNo">359</span>    // Force evictions by putting in a block too big.<a name="line.359"></a>
-<span class="sourceLineNo">360</span>    final long justTooBigSize = lbc.acceptableSize() + 1;<a name="line.360"></a>
-<span class="sourceLineNo">361</span>    lbc.cacheBlock(new BlockCacheKey("bck2", 0), new DataCacheEntry() {<a name="line.361"></a>
-<span class="sourceLineNo">362</span>      @Override<a name="line.362"></a>
-<span class="sourceLineNo">363</span>      public long heapSize() {<a name="line.363"></a>
-<span class="sourceLineNo">364</span>        return justTooBigSize;<a name="line.364"></a>
-<span class="sourceLineNo">365</span>      }<a name="line.365"></a>
-<span class="sourceLineNo">366</span><a name="line.366"></a>
-<span class="sourceLineNo">367</span>      @Override<a name="line.367"></a>
-<span class="sourceLineNo">368</span>      public int getSerializedLength() {<a name="line.368"></a>
-<span class="sourceLineNo">369</span>        return (int)heapSize();<a name="line.369"></a>
-<span class="sourceLineNo">370</span>      }<a name="line.370"></a>
-<span class="sourceLineNo">371</span>    });<a name="line.371"></a>
-<span class="sourceLineNo">372</span>    // The eviction thread in lrublockcache needs to run.<a name="line.372"></a>
-<span class="sourceLineNo">373</span>    while (initialL1BlockCount != lbc.getBlockCount()) Threads.sleep(10);<a name="line.373"></a>
-<span class="sourceLineNo">374</span>    assertEquals(initialL1BlockCount, lbc.getBlockCount());<a name="line.374"></a>
-<span class="sourceLineNo">375</span>  }<a name="line.375"></a>
-<span class="sourceLineNo">376</span><a name="line.376"></a>
-<span class="sourceLineNo">377</span>  @Test<a name="line.377"></a>
-<span class="sourceLineNo">378</span>  public void testL2CacheWithInvalidBucketSize() {<a name="line.378"></a>
-<span class="sourceLineNo">379</span>    Configuration c = new Configuration(this.conf);<a name="line.379"></a>
-<span class="sourceLineNo">380</span>    c.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");<a name="line.380"></a>
-<span class="sourceLineNo">381</span>    c.set(CacheConfig.BUCKET_CACHE_BUCKETS_KEY, "256,512,1024,2048,4000,4096");<a name="line.381"></a>
-<span class="sourceLineNo">382</span>    c.setFloat(HConstants.BUCKET_CACHE_SIZE_KEY, 1024);<a name="line.382"></a>
-<span class="sourceLineNo">383</span>    try {<a name="line.383"></a>
-<span class="sourceLineNo">384</span>      CacheConfig.getBucketCache(c);<a name="line.384"></a>
-<span class="sourceLineNo">385</span>      fail("Should throw IllegalArgumentException when passing illegal value for bucket size");<a name="line.385"></a>
-<span class="sourceLineNo">386</span>    } catch (IllegalArgumentException e) {<a name="line.386"></a>
-<span class="sourceLineNo">387</span>    }<a name="line.387"></a>
-<span class="sourceLineNo">388</span>  }<a name="line.388"></a>
-<span class="sourceLineNo">389</span>}<a name="line.389"></a>
+<span class="sourceLineNo">212</span>    CacheConfig.instantiateBlockCache(conf);<a name="line.212"></a>
+<span class="sourceLineNo">213</span>    CacheConfig cacheConfig = new CacheConfig(conf);<a name="line.213"></a>
+<span class="sourceLineNo">214</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.214"></a>
+<span class="sourceLineNo">215</span>    assertFalse(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.215"></a>
+<span class="sourceLineNo">216</span>    assertFalse(cacheConfig.shouldCacheDataCompressed());<a name="line.216"></a>
+<span class="sourceLineNo">217</span>    assertFalse(cacheConfig.shouldCacheDataOnWrite());<a name="line.217"></a>
+<span class="sourceLineNo">218</span>    assertTrue(cacheConfig.shouldCacheDataOnRead());<a name="line.218"></a>
+<span class="sourceLineNo">219</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.219"></a>
+<span class="sourceLineNo">220</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.220"></a>
+<span class="sourceLineNo">221</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.221"></a>
+<span class="sourceLineNo">222</span>    assertFalse(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.222"></a>
+<span class="sourceLineNo">223</span>    assertFalse(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.223"></a>
+<span class="sourceLineNo">224</span><a name="line.224"></a>
+<span class="sourceLineNo">225</span>    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, true);<a name="line.225"></a>
+<span class="sourceLineNo">226</span>    conf.setBoolean(CacheConfig.CACHE_DATA_BLOCKS_COMPRESSED_KEY, true);<a name="line.226"></a>
+<span class="sourceLineNo">227</span>    conf.setBoolean(CacheConfig.CACHE_BLOOM_BLOCKS_ON_WRITE_KEY, true);<a name="line.227"></a>
+<span class="sourceLineNo">228</span>    conf.setBoolean(CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY, true);<a name="line.228"></a>
+<span class="sourceLineNo">229</span><a name="line.229"></a>
+<span class="sourceLineNo">230</span>    cacheConfig = new CacheConfig(conf);<a name="line.230"></a>
+<span class="sourceLineNo">231</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.231"></a>
+<span class="sourceLineNo">232</span>    assertTrue(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.232"></a>
+<span class="sourceLineNo">233</span>    assertTrue(cacheConfig.shouldCacheDataCompressed());<a name="line.233"></a>
+<span class="sourceLineNo">234</span>    assertTrue(cacheConfig.shouldCacheDataOnWrite());<a name="line.234"></a>
+<span class="sourceLineNo">235</span>    assertTrue(cacheConfig.shouldCacheDataOnRead());<a name="line.235"></a>
+<span class="sourceLineNo">236</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.236"></a>
+<span class="sourceLineNo">237</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.237"></a>
+<span class="sourceLineNo">238</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.238"></a>
+<span class="sourceLineNo">239</span>    assertTrue(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.239"></a>
+<span class="sourceLineNo">240</span>    assertTrue(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.240"></a>
+<span class="sourceLineNo">241</span><a name="line.241"></a>
+<span class="sourceLineNo">242</span>    conf.setBoolean(CacheConfig.CACHE_DATA_ON_READ_KEY, false);<a name="line.242"></a>
+<span class="sourceLineNo">243</span>    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, false);<a name="line.243"></a>
+<span class="sourceLineNo">244</span><a name="line.244"></a>
+<span class="sourceLineNo">245</span>    cacheConfig = new CacheConfig(conf);<a name="line.245"></a>
+<span class="sourceLineNo">246</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.246"></a>
+<span class="sourceLineNo">247</span>    assertFalse(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.247"></a>
+<span class="sourceLineNo">248</span>    assertFalse(cacheConfig.shouldCacheDataCompressed());<a name="line.248"></a>
+<span class="sourceLineNo">249</span>    assertFalse(cacheConfig.shouldCacheDataOnWrite());<a name="line.249"></a>
+<span class="sourceLineNo">250</span>    assertFalse(cacheConfig.shouldCacheDataOnRead());<a name="line.250"></a>
+<span class="sourceLineNo">251</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.251"></a>
+<span class="sourceLineNo">252</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.252"></a>
+<span class="sourceLineNo">253</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.253"></a>
+<span class="sourceLineNo">254</span>    assertTrue(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.254"></a>
+<span class="sourceLineNo">255</span>    assertTrue(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.255"></a>
+<span class="sourceLineNo">256</span><a name="line.256"></a>
+<span class="sourceLineNo">257</span>    conf.setBoolean(CacheConfig.CACHE_DATA_ON_READ_KEY, true);<a name="line.257"></a>
+<span class="sourceLineNo">258</span>    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, false);<a name="line.258"></a>
+<span class="sourceLineNo">259</span><a name="line.259"></a>
+<span class="sourceLineNo">260</span>    HColumnDescriptor family = new HColumnDescriptor("testDisableCacheDataBlock");<a name="line.260"></a>
+<span class="sourceLineNo">261</span>    family.setBlockCacheEnabled(false);<a name="line.261"></a>
+<span class="sourceLineNo">262</span><a name="line.262"></a>
+<span class="sourceLineNo">263</span>    cacheConfig = new CacheConfig(conf, family);<a name="line.263"></a>
+<span class="sourceLineNo">264</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));<a name="line.264"></a>
+<span class="sourceLineNo">265</span>    assertFalse(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));<a name="line.265"></a>
+<span class="sourceLineNo">266</span>    assertFalse(cacheConfig.shouldCacheDataCompressed());<a name="line.266"></a>
+<span class="sourceLineNo">267</span>    assertFalse(cacheConfig.shouldCacheDataOnWrite());<a name="line.267"></a>
+<span class="sourceLineNo">268</span>    assertFalse(cacheConfig.shouldCacheDataOnRead());<a name="line.268"></a>
+<span class="sourceLineNo">269</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.INDEX));<a name="line.269"></a>
+<span class="sourceLineNo">270</span>    assertFalse(cacheConfig.shouldCacheBlockOnRead(BlockCategory.META));<a name="line.270"></a>
+<span class="sourceLineNo">271</span>    assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.BLOOM));<a name="line.271"></a>
+<span class="sourceLineNo">272</span>    assertTrue(cacheConfig.shouldCacheBloomsOnWrite());<a name="line.272"></a>
+<span class="sourceLineNo">273</span>    assertTrue(cacheConfig.shouldCacheIndexesOnWrite());<a name="line.273"></a>
+<span class="sourceLineNo">274</span>  }<a name="line.274"></a>
+<span class="sourceLineNo">275</span><a name="line.275"></a>
+<span class="sourceLineNo">276</span>  @Test<a name="line.276"></a>
+<span class="sourceLineNo">277</span>  public void testCacheConfigDefaultLRUBlockCache() {<a name="line.277"></a>
+<span class="sourceLineNo">278</span>    CacheConfig.instantiateBlockCache(this.conf);<a name="line.278"></a>
+<span class="sourceLineNo">279</span>    CacheConfig cc = new CacheConfig(this.conf);<a name="line.279"></a>
+<span class="sourceLineNo">280</span>    assertTrue(cc.isBlockCacheEnabled());<a name="line.280"></a>
+<span class="sourceLineNo">281</span>    assertTrue(CacheConfig.DEFAULT_IN_MEMORY == cc.isInMemory());<a name="line.281"></a>
+<span class="sourceLineNo">282</span>    basicBlockCacheOps(cc, false, true);<a name="line.282"></a>
+<span class="sourceLineNo">283</span>    assertTrue(cc.getBlockCache() instanceof LruBlockCache);<a name="line.283"></a>
+<span class="sourceLineNo">284</span>  }<a name="line.284"></a>
+<span class="sourceLineNo">285</span><a name="line.285"></a>
+<span class="sourceLineNo">286</span>  /**<a name="line.286"></a>
+<span class="sourceLineNo">287</span>   * Assert that the caches are deployed with CombinedBlockCache and of the appropriate sizes.<a name="line.287"></a>
+<span class="sourceLineNo">288</span>   */<a name="line.288"></a>
+<span class="sourceLineNo">289</span>  @Test<a name="line.289"></a>
+<span class="sourceLineNo">290</span>  public void testOffHeapBucketCacheConfig() {<a name="line.290"></a>
+<span class="sourceLineNo">291</span>    this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");<a name="line.291"></a>
+<span class="sourceLineNo">292</span>    doBucketCacheConfigTest();<a name="line.292"></a>
+<span class="sourceLineNo">293</span>  }<a name="line.293"></a>
+<span class="sourceLineNo">294</span><a name="line.294"></a>
+<span class="sourceLineNo">295</span>  @Test<a name="line.295"></a>
+<span class="sourceLineNo">296</span>  public void testFileBucketCacheConfig() throws IOException {<a name="line.296"></a>
+<span class="sourceLineNo">297</span>    HBaseTestingUtility htu = new HBaseTestingUtility(this.conf);<a name="line.297"></a>
+<span class="sourceLineNo">298</span>    try {<a name="line.298"></a>
+<span class="sourceLineNo">299</span>      Path p = new Path(htu.getDataTestDir(), "bc.txt");<a name="line.299"></a>
+<span class="sourceLineNo">300</span>      FileSystem fs = FileSystem.get(this.conf);<a name="line.300"></a>
+<span class="sourceLineNo">301</span>      fs.create(p).close();<a name="line.301"></a>
+<span class="sourceLineNo">302</span>      this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "file:" + p);<a name="line.302"></a>
+<span class="sourceLineNo">303</span>      doBucketCacheConfigTest();<a name="line.303"></a>
+<span class="sourceLineNo">304</span>    } finally {<a name="line.304"></a>
+<span class="sourceLineNo">305</span>      htu.cleanupTestDir();<a name="line.305"></a>
+<span class="sourceLineNo">306</span>    }<a name="line.306"></a>
+<span class="sourceLineNo">307</span>  }<a name="line.307"></a>
+<span class="sourceLineNo">308</span><a name="line.308"></a>
+<span class="sourceLineNo">309</span>  private void doBucketCacheConfigTest() {<a name="line.309"></a>
+<span class="sourceLineNo">310</span>    final int bcSize = 100;<a name="line.310"></a>
+<span class="sourceLineNo">311</span>    this.conf.setInt(HConstants.BUCKET_CACHE_SIZE_KEY, bcSize);<a name="line.311"></a>
+<span class="sourceLineNo">312</span>    CacheConfig.instantiateBlockCache(this.conf);<a name="line.312"></a>
+<span class="sourceLineNo">313</span>    CacheConfig cc = new CacheConfig(this.conf);<a name="line.313"></a>
+<span class="sourceLineNo">314</span>    basicBlockCacheOps(cc, false, false);<a name="line.314"></a>
+<span class="sourceLineNo">315</span>    assertTrue(cc.getBlockCache() instanceof CombinedBlockCache);<a name="line.315"></a>
+<span class="sourceLineNo">316</span>    // TODO: Assert sizes allocated are right and proportions.<a name="line.316"></a>
+<span class="sourceLineNo">317</span>    CombinedBlockCache cbc = (CombinedBlockCache)cc.getBlockCache();<a name="line.317"></a>
+<span class="sourceLineNo">318</span>    BlockCache [] bcs = cbc.getBlockCaches();<a name="line.318"></a>
+<span class="sourceLineNo">319</span>    assertTrue(bcs[0] instanceof LruBlockCache);<a name="line.319"></a>
+<span class="sourceLineNo">320</span>    LruBlockCache lbc = (LruBlockCache)bcs[0];<a name="line.320"></a>
+<span class="sourceLineNo">321</span>    assertEquals(MemorySizeUtil.getOnHeapCacheSize(this.conf), lbc.getMaxSize());<a name="line.321"></a>
+<span class="sourceLineNo">322</span>    assertTrue(bcs[1] instanceof BucketCache);<a name="line.322"></a>
+<span class="sourceLineNo">323</span>    BucketCache bc = (BucketCache)bcs[1];<a name="line.323"></a>
+<span class="sourceLineNo">324</span>    // getMaxSize comes back in bytes but we specified size in MB<a name="line.324"></a>
+<span class="sourceLineNo">325</span>    assertEquals(bcSize, bc.getMaxSize() / (1024 * 1024));<a name="line.325"></a>
+<span class="sourceLineNo">326</span>  }<a name="line.326"></a>
+<span class="sourceLineNo">327</span><a name="line.327"></a>
+<span class="sourceLineNo">328</span>  /**<a name="line.328"></a>
+<span class="sourceLineNo">329</span>   * Assert that when BUCKET_CACHE_COMBINED_KEY is false, the non-default, that we deploy<a name="line.329"></a>
+<span class="sourceLineNo">330</span>   * LruBlockCache as L1 with a BucketCache for L2.<a name="line.330"></a>
+<span class="sourceLineNo">331</span>   */<a name="line.331"></a>
+<span class="sourceLineNo">332</span>  @Test<a name="line.332"></a>
+<span class="sourceLineNo">333</span>  public void testBucketCacheConfigL1L2Setup() {<a name="line.333"></a>
+<span class="sourceLineNo">334</span>    this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");<a name="line.334"></a>
+<span class="sourceLineNo">335</span>    // Make lru size is smaller than bcSize for sure.  Need this to be true so when eviction<a name="line.335"></a>
+<span class="sourceLineNo">336</span>    // from L1 happens, it does not fail because L2 can't take the eviction because block too big.<a name="line.336"></a>
+<span class="sourceLineNo">337</span>    this.conf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.001f);<a name="line.337"></a>
+<span class="sourceLineNo">338</span>    MemoryUsage mu = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage();<a name="line.338"></a>
+<span class="sourceLineNo">339</span>    long lruExpectedSize = MemorySizeUtil.getOnHeapCacheSize(this.conf);<a name="line.339"></a>
+<span class="sourceLineNo">340</span>    final int bcSize = 100;<a name="line.340"></a>
+<span class="sourceLineNo">341</span>    long bcExpectedSize = 100 * 1024 * 1024; // MB.<a name="line.341"></a>
+<span class="sourceLineNo">342</span>    assertTrue(lruExpectedSize &lt; bcExpectedSize);<a name="line.342"></a>
+<span class="sourceLineNo">343</span>    this.conf.setInt(HConstants.BUCKET_CACHE_SIZE_KEY, bcSize);<a name="line.343"></a>
+<span class="sourceLineNo">344</span>    CacheConfig.instantiateBlockCache(this.conf);<a name="line.344"></a>
+<span class="sourceLineNo">345</span>    CacheConfig cc = new CacheConfig(this.conf);<a name="line.345"></a>
+<span class="sourceLineNo">346</span>    basicBlockCacheOps(cc, false, false);<a name="line.346"></a>
+<span class="sourceLineNo">347</span>    assertTrue(cc.getBlockCache() instanceof CombinedBlockCache);<a name="line.347"></a>
+<span class="sourceLineNo">348</span>    // TODO: Assert sizes allocated are right and proportions.<a name="line.348"></a>
+<span class="sourceLineNo">349</span>    CombinedBlockCache cbc = (CombinedBlockCache)cc.getBlockCache();<a name="line.349"></a>
+<span class="sourceLineNo">350</span>    LruBlockCache lbc = cbc.onHeapCache;<a name="line.350"></a>
+<span class="sourceLineNo">351</span>    assertEquals(lruExpectedSize, lbc.getMaxSize());<a name="line.351"></a>
+<span class="sourceLineNo">352</span>    BlockCache bc = cbc.l2Cache;<a name="line.352"></a>
+<span class="sourceLineNo">353</span>    // getMaxSize comes back in bytes but we specified size in MB<a name="line.353"></a>
+<span class="sourceLineNo">354</span>    assertEquals(bcExpectedSize, ((BucketCache) bc).getMaxSize());<a name="line.354"></a>
+<span class="sourceLineNo">355</span>    // Test the L1+L2 deploy works as we'd expect with blocks evicted from L1 going to L2.<a name="line.355"></a>
+<span class="sourceLineNo">356</span>    long initialL1BlockCount = lbc.getBlockCount();<a name="line.356"></a>
+<span class="sourceLineNo">357</span>    long initialL2BlockCount = bc.getBlockCount();<a name="line.357"></a>
+<span class="sourceLineNo">358</span>    Cacheable c = new DataCacheEntry();<a name="line.358"></a>
+<span class="sourceLineNo">359</span>    BlockCacheKey bck = new BlockCacheKey("bck", 0);<a name="line.359"></a>
+<span class="sourceLineNo">360</span>    lbc.cacheBlock(bck, c, false);<a name="line.360"></a>
+<span class="sourceLineNo">361</span>    assertEquals(initialL1BlockCount + 1, lbc.getBlockCount());<a name="line.361"></a>
+<span class="sourceLineNo">362</span>    assertEquals(initialL2BlockCount, bc.getBlockCount());<a name="line.362"></a>
+<span class="sourceLineNo">363</span>    // Force evictions by putting in a block too big.<a name="line.363"></a>
+<span class="sourceLineNo">364</span>    final long justTooBigSize = lbc.acceptableSize() + 1;<a name="line.364"></a>
+<span class="sourceLineNo">365</span>    lbc.cacheBlock(new BlockCacheKey("bck2", 0), new DataCacheEntry() {<a name="line.365"></a>
+<span class="sourceLineNo">366</span>      @Override<a name="line.366"></a>
+<span class="sourceLineNo">367</span>      public long heapSize() {<a name="line.367"></a>
+<span class="sourceLineNo">368</span>        return justTooBigSize;<a name="line.368"></a>
+<span class="sourceLineNo">369</span>      }<a name="line.369"></a>
+<span class="sourceLineNo">370</span><a name="line.370"></a>
+<span class="sourceLineNo">371</span>      @Override<a name="line.371"></a>
+<span class="sourceLineNo">372</span>      public int getSerializedLength() {<a name="line.372"></a>
+<span class="sourceLineNo">373</span>        return (int)heapSize();<a name="line.373"></a>
+<span class="sourceLineNo">374</span>      }<a name="line.374"></a>
+<span class="sourceLineNo">375</span>    });<a name="line.375"></a>
+<span class="sourceLineNo">376</span>    // The eviction thread in lrublockcache needs to run.<a name="line.376"></a>
+<span class="sourceLineNo">377</span>    while (initialL1BlockCount != lbc.getBlockCount()) Threads.sleep(10);<a name="line.377"></a>
+<span class="sourceLineNo">378</span>    assertEquals(initialL1BlockCount, lbc.getBlockCount());<a name="line.378"></a>
+<span class="sourceLineNo">379</span>  }<a name="line.379"></a>
+<span class="sourceLineNo">380</span><a name="line.380"></a>
+<span class="sourceLineNo">381</span>  @Test<a name="line.381"></a>
+<span class="sourceLineNo">382</span>  public void testL2CacheWithInvalidBucketSize() {<a name="line.382"></a>
+<span class="sourceLineNo">383</span>    Configuration c = new Configuration(this.conf);<a name="line.383"></a>
+<span class="sourceLineNo">384</span>    c.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");<a name="line.384"></a>
+<span class="sourceLineNo">385</span>    c.set(CacheConfig.BUCKET_CACHE_BUCKETS_KEY, "256,512,1024,2048,4000,4096");<a name="line.385"></a>
+<span class="sourceLineNo">386</span>    c.setFloat(HConstants.BUCKET_CACHE_SIZE_KEY, 1024);<a name="line.386"></a>
+<span class="sourceLineNo">387</span>    try {<a name="line.387"></a>
+<span class="sourceLineNo">388</span>      CacheConfig.getBucketCache(c);<a name="line.388"></a>
+<span class="sourceLineNo">389</span>      fail("Should throw IllegalArgumentException when passing illegal value for bucket size");<a name="line.389"></a>
+<span class="sourceLineNo">390</span>    } catch (IllegalArgumentException e) {<a name="line.390"></a>
+<span class="sourceLineNo">391</span>    }<a name="line.391"></a>
+<span class="sourceLineNo">392</span>  }<a name="line.392"></a>
+<span class="sourceLineNo">393</span>}<a name="line.393"></a>
 
 
 


[02/26] hbase-site git commit: Published site at 6f15cecaed2f1f76bfe1880b7c578ed369daa5d5.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.CacheOnWriteType.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.CacheOnWriteType.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.CacheOnWriteType.html
index 29963fb..3494514 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.CacheOnWriteType.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.CacheOnWriteType.html
@@ -169,137 +169,138 @@
 <span class="sourceLineNo">161</span>    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, false);<a name="line.161"></a>
 <span class="sourceLineNo">162</span>    conf.setBoolean(CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY, false);<a name="line.162"></a>
 <span class="sourceLineNo">163</span>    conf.setBoolean(CacheConfig.CACHE_BLOOM_BLOCKS_ON_WRITE_KEY, false);<a name="line.163"></a>
-<span class="sourceLineNo">164</span><a name="line.164"></a>
-<span class="sourceLineNo">165</span>    fs = HFileSystem.get(conf);<a name="line.165"></a>
-<span class="sourceLineNo">166</span><a name="line.166"></a>
-<span class="sourceLineNo">167</span>    // Create the schema<a name="line.167"></a>
-<span class="sourceLineNo">168</span>    ColumnFamilyDescriptor hcd = cowType<a name="line.168"></a>
-<span class="sourceLineNo">169</span>        .modifyFamilySchema(<a name="line.169"></a>
-<span class="sourceLineNo">170</span>          ColumnFamilyDescriptorBuilder.newBuilder(family).setBloomFilterType(BloomType.ROWCOL))<a name="line.170"></a>
-<span class="sourceLineNo">171</span>        .build();<a name="line.171"></a>
-<span class="sourceLineNo">172</span>    TableDescriptor htd =<a name="line.172"></a>
-<span class="sourceLineNo">173</span>        TableDescriptorBuilder.newBuilder(TableName.valueOf(table)).setColumnFamily(hcd).build();<a name="line.173"></a>
-<span class="sourceLineNo">174</span><a name="line.174"></a>
-<span class="sourceLineNo">175</span>    // Create a store based on the schema<a name="line.175"></a>
-<span class="sourceLineNo">176</span>    String id = TestCacheOnWriteInSchema.class.getName();<a name="line.176"></a>
-<span class="sourceLineNo">177</span>    Path logdir = new Path(FSUtils.getRootDir(conf), AbstractFSWALProvider.getWALDirectoryName(id));<a name="line.177"></a>
-<span class="sourceLineNo">178</span>    fs.delete(logdir, true);<a name="line.178"></a>
-<span class="sourceLineNo">179</span><a name="line.179"></a>
-<span class="sourceLineNo">180</span>    RegionInfo info = RegionInfoBuilder.newBuilder(htd.getTableName()).build();<a name="line.180"></a>
-<span class="sourceLineNo">181</span>    walFactory = new WALFactory(conf, id);<a name="line.181"></a>
-<span class="sourceLineNo">182</span><a name="line.182"></a>
-<span class="sourceLineNo">183</span>    region = TEST_UTIL.createLocalHRegion(info, htd, walFactory.getWAL(info));<a name="line.183"></a>
-<span class="sourceLineNo">184</span>    store = new HStore(region, hcd, conf);<a name="line.184"></a>
-<span class="sourceLineNo">185</span>  }<a name="line.185"></a>
-<span class="sourceLineNo">186</span><a name="line.186"></a>
-<span class="sourceLineNo">187</span>  @After<a name="line.187"></a>
-<span class="sourceLineNo">188</span>  public void tearDown() throws IOException {<a name="line.188"></a>
-<span class="sourceLineNo">189</span>    IOException ex = null;<a name="line.189"></a>
-<span class="sourceLineNo">190</span>    try {<a name="line.190"></a>
-<span class="sourceLineNo">191</span>      region.close();<a name="line.191"></a>
-<span class="sourceLineNo">192</span>    } catch (IOException e) {<a name="line.192"></a>
-<span class="sourceLineNo">193</span>      LOG.warn("Caught Exception", e);<a name="line.193"></a>
-<span class="sourceLineNo">194</span>      ex = e;<a name="line.194"></a>
-<span class="sourceLineNo">195</span>    }<a name="line.195"></a>
-<span class="sourceLineNo">196</span>    try {<a name="line.196"></a>
-<span class="sourceLineNo">197</span>      walFactory.close();<a name="line.197"></a>
-<span class="sourceLineNo">198</span>    } catch (IOException e) {<a name="line.198"></a>
-<span class="sourceLineNo">199</span>      LOG.warn("Caught Exception", e);<a name="line.199"></a>
-<span class="sourceLineNo">200</span>      ex = e;<a name="line.200"></a>
-<span class="sourceLineNo">201</span>    }<a name="line.201"></a>
-<span class="sourceLineNo">202</span>    try {<a name="line.202"></a>
-<span class="sourceLineNo">203</span>      fs.delete(new Path(DIR), true);<a name="line.203"></a>
-<span class="sourceLineNo">204</span>    } catch (IOException e) {<a name="line.204"></a>
-<span class="sourceLineNo">205</span>      LOG.error("Could not delete " + DIR, e);<a name="line.205"></a>
-<span class="sourceLineNo">206</span>      ex = e;<a name="line.206"></a>
-<span class="sourceLineNo">207</span>    }<a name="line.207"></a>
-<span class="sourceLineNo">208</span>    if (ex != null) {<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      throw ex;<a name="line.209"></a>
-<span class="sourceLineNo">210</span>    }<a name="line.210"></a>
-<span class="sourceLineNo">211</span>  }<a name="line.211"></a>
-<span class="sourceLineNo">212</span><a name="line.212"></a>
-<span class="sourceLineNo">213</span>  @Test<a name="line.213"></a>
-<span class="sourceLineNo">214</span>  public void testCacheOnWriteInSchema() throws IOException {<a name="line.214"></a>
-<span class="sourceLineNo">215</span>    // Write some random data into the store<a name="line.215"></a>
-<span class="sourceLineNo">216</span>    StoreFileWriter writer = store.createWriterInTmp(Integer.MAX_VALUE,<a name="line.216"></a>
-<span class="sourceLineNo">217</span>        HFile.DEFAULT_COMPRESSION_ALGORITHM, false, true, false, false);<a name="line.217"></a>
-<span class="sourceLineNo">218</span>    writeStoreFile(writer);<a name="line.218"></a>
-<span class="sourceLineNo">219</span>    writer.close();<a name="line.219"></a>
-<span class="sourceLineNo">220</span>    // Verify the block types of interest were cached on write<a name="line.220"></a>
-<span class="sourceLineNo">221</span>    readStoreFile(writer.getPath());<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  }<a name="line.222"></a>
-<span class="sourceLineNo">223</span><a name="line.223"></a>
-<span class="sourceLineNo">224</span>  private void readStoreFile(Path path) throws IOException {<a name="line.224"></a>
-<span class="sourceLineNo">225</span>    CacheConfig cacheConf = store.getCacheConfig();<a name="line.225"></a>
-<span class="sourceLineNo">226</span>    BlockCache cache = cacheConf.getBlockCache();<a name="line.226"></a>
-<span class="sourceLineNo">227</span>    HStoreFile sf = new HStoreFile(fs, path, conf, cacheConf, BloomType.ROWCOL, true);<a name="line.227"></a>
-<span class="sourceLineNo">228</span>    sf.initReader();<a name="line.228"></a>
-<span class="sourceLineNo">229</span>    HFile.Reader reader = sf.getReader().getHFileReader();<a name="line.229"></a>
-<span class="sourceLineNo">230</span>    try {<a name="line.230"></a>
-<span class="sourceLineNo">231</span>      // Open a scanner with (on read) caching disabled<a name="line.231"></a>
-<span class="sourceLineNo">232</span>      HFileScanner scanner = reader.getScanner(false, false);<a name="line.232"></a>
-<span class="sourceLineNo">233</span>      assertTrue(testDescription, scanner.seekTo());<a name="line.233"></a>
-<span class="sourceLineNo">234</span>      // Cribbed from io.hfile.TestCacheOnWrite<a name="line.234"></a>
-<span class="sourceLineNo">235</span>      long offset = 0;<a name="line.235"></a>
-<span class="sourceLineNo">236</span>      while (offset &lt; reader.getTrailer().getLoadOnOpenDataOffset()) {<a name="line.236"></a>
-<span class="sourceLineNo">237</span>        // Flags: don't cache the block, use pread, this is not a compaction.<a name="line.237"></a>
-<span class="sourceLineNo">238</span>        // Also, pass null for expected block type to avoid checking it.<a name="line.238"></a>
-<span class="sourceLineNo">239</span>        HFileBlock block = reader.readBlock(offset, -1, false, true,<a name="line.239"></a>
-<span class="sourceLineNo">240</span>          false, true, null, DataBlockEncoding.NONE);<a name="line.240"></a>
-<span class="sourceLineNo">241</span>        BlockCacheKey blockCacheKey = new BlockCacheKey(reader.getName(),<a name="line.241"></a>
-<span class="sourceLineNo">242</span>          offset);<a name="line.242"></a>
-<span class="sourceLineNo">243</span>        boolean isCached = cache.getBlock(blockCacheKey, true, false, true) != null;<a name="line.243"></a>
-<span class="sourceLineNo">244</span>        boolean shouldBeCached = cowType.shouldBeCached(block.getBlockType());<a name="line.244"></a>
-<span class="sourceLineNo">245</span>        if (shouldBeCached != isCached) {<a name="line.245"></a>
-<span class="sourceLineNo">246</span>          throw new AssertionError(<a name="line.246"></a>
-<span class="sourceLineNo">247</span>            "shouldBeCached: " + shouldBeCached+ "\n" +<a name="line.247"></a>
-<span class="sourceLineNo">248</span>            "isCached: " + isCached + "\n" +<a name="line.248"></a>
-<span class="sourceLineNo">249</span>            "Test description: " + testDescription + "\n" +<a name="line.249"></a>
-<span class="sourceLineNo">250</span>            "block: " + block + "\n" +<a name="line.250"></a>
-<span class="sourceLineNo">251</span>            "blockCacheKey: " + blockCacheKey);<a name="line.251"></a>
-<span class="sourceLineNo">252</span>        }<a name="line.252"></a>
-<span class="sourceLineNo">253</span>        offset += block.getOnDiskSizeWithHeader();<a name="line.253"></a>
-<span class="sourceLineNo">254</span>      }<a name="line.254"></a>
-<span class="sourceLineNo">255</span>    } finally {<a name="line.255"></a>
-<span class="sourceLineNo">256</span>      reader.close();<a name="line.256"></a>
-<span class="sourceLineNo">257</span>    }<a name="line.257"></a>
-<span class="sourceLineNo">258</span>  }<a name="line.258"></a>
-<span class="sourceLineNo">259</span><a name="line.259"></a>
-<span class="sourceLineNo">260</span>  private static KeyValue.Type generateKeyType(Random rand) {<a name="line.260"></a>
-<span class="sourceLineNo">261</span>    if (rand.nextBoolean()) {<a name="line.261"></a>
-<span class="sourceLineNo">262</span>      // Let's make half of KVs puts.<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      return KeyValue.Type.Put;<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    } else {<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      KeyValue.Type keyType =<a name="line.265"></a>
-<span class="sourceLineNo">266</span>          KeyValue.Type.values()[1 + rand.nextInt(NUM_VALID_KEY_TYPES)];<a name="line.266"></a>
-<span class="sourceLineNo">267</span>      if (keyType == KeyValue.Type.Minimum || keyType == KeyValue.Type.Maximum)<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      {<a name="line.268"></a>
-<span class="sourceLineNo">269</span>        throw new RuntimeException("Generated an invalid key type: " + keyType<a name="line.269"></a>
-<span class="sourceLineNo">270</span>            + ". " + "Probably the layout of KeyValue.Type has changed.");<a name="line.270"></a>
-<span class="sourceLineNo">271</span>      }<a name="line.271"></a>
-<span class="sourceLineNo">272</span>      return keyType;<a name="line.272"></a>
-<span class="sourceLineNo">273</span>    }<a name="line.273"></a>
-<span class="sourceLineNo">274</span>  }<a name="line.274"></a>
-<span class="sourceLineNo">275</span><a name="line.275"></a>
-<span class="sourceLineNo">276</span>  private void writeStoreFile(StoreFileWriter writer) throws IOException {<a name="line.276"></a>
-<span class="sourceLineNo">277</span>    final int rowLen = 32;<a name="line.277"></a>
-<span class="sourceLineNo">278</span>    for (int i = 0; i &lt; NUM_KV; ++i) {<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      byte[] k = RandomKeyValueUtil.randomOrderedKey(rand, i);<a name="line.279"></a>
-<span class="sourceLineNo">280</span>      byte[] v = RandomKeyValueUtil.randomValue(rand);<a name="line.280"></a>
-<span class="sourceLineNo">281</span>      int cfLen = rand.nextInt(k.length - rowLen + 1);<a name="line.281"></a>
-<span class="sourceLineNo">282</span>      KeyValue kv = new KeyValue(<a name="line.282"></a>
-<span class="sourceLineNo">283</span>          k, 0, rowLen,<a name="line.283"></a>
-<span class="sourceLineNo">284</span>          k, rowLen, cfLen,<a name="line.284"></a>
-<span class="sourceLineNo">285</span>          k, rowLen + cfLen, k.length - rowLen - cfLen,<a name="line.285"></a>
-<span class="sourceLineNo">286</span>          rand.nextLong(),<a name="line.286"></a>
-<span class="sourceLineNo">287</span>          generateKeyType(rand),<a name="line.287"></a>
-<span class="sourceLineNo">288</span>          v, 0, v.length);<a name="line.288"></a>
-<span class="sourceLineNo">289</span>      writer.append(kv);<a name="line.289"></a>
-<span class="sourceLineNo">290</span>    }<a name="line.290"></a>
-<span class="sourceLineNo">291</span>  }<a name="line.291"></a>
-<span class="sourceLineNo">292</span><a name="line.292"></a>
-<span class="sourceLineNo">293</span>}<a name="line.293"></a>
-<span class="sourceLineNo">294</span><a name="line.294"></a>
+<span class="sourceLineNo">164</span>    CacheConfig.instantiateBlockCache(conf);<a name="line.164"></a>
+<span class="sourceLineNo">165</span><a name="line.165"></a>
+<span class="sourceLineNo">166</span>    fs = HFileSystem.get(conf);<a name="line.166"></a>
+<span class="sourceLineNo">167</span><a name="line.167"></a>
+<span class="sourceLineNo">168</span>    // Create the schema<a name="line.168"></a>
+<span class="sourceLineNo">169</span>    ColumnFamilyDescriptor hcd = cowType<a name="line.169"></a>
+<span class="sourceLineNo">170</span>        .modifyFamilySchema(<a name="line.170"></a>
+<span class="sourceLineNo">171</span>          ColumnFamilyDescriptorBuilder.newBuilder(family).setBloomFilterType(BloomType.ROWCOL))<a name="line.171"></a>
+<span class="sourceLineNo">172</span>        .build();<a name="line.172"></a>
+<span class="sourceLineNo">173</span>    TableDescriptor htd =<a name="line.173"></a>
+<span class="sourceLineNo">174</span>        TableDescriptorBuilder.newBuilder(TableName.valueOf(table)).setColumnFamily(hcd).build();<a name="line.174"></a>
+<span class="sourceLineNo">175</span><a name="line.175"></a>
+<span class="sourceLineNo">176</span>    // Create a store based on the schema<a name="line.176"></a>
+<span class="sourceLineNo">177</span>    String id = TestCacheOnWriteInSchema.class.getName();<a name="line.177"></a>
+<span class="sourceLineNo">178</span>    Path logdir = new Path(FSUtils.getRootDir(conf), AbstractFSWALProvider.getWALDirectoryName(id));<a name="line.178"></a>
+<span class="sourceLineNo">179</span>    fs.delete(logdir, true);<a name="line.179"></a>
+<span class="sourceLineNo">180</span><a name="line.180"></a>
+<span class="sourceLineNo">181</span>    RegionInfo info = RegionInfoBuilder.newBuilder(htd.getTableName()).build();<a name="line.181"></a>
+<span class="sourceLineNo">182</span>    walFactory = new WALFactory(conf, id);<a name="line.182"></a>
+<span class="sourceLineNo">183</span><a name="line.183"></a>
+<span class="sourceLineNo">184</span>    region = TEST_UTIL.createLocalHRegion(info, htd, walFactory.getWAL(info));<a name="line.184"></a>
+<span class="sourceLineNo">185</span>    store = new HStore(region, hcd, conf);<a name="line.185"></a>
+<span class="sourceLineNo">186</span>  }<a name="line.186"></a>
+<span class="sourceLineNo">187</span><a name="line.187"></a>
+<span class="sourceLineNo">188</span>  @After<a name="line.188"></a>
+<span class="sourceLineNo">189</span>  public void tearDown() throws IOException {<a name="line.189"></a>
+<span class="sourceLineNo">190</span>    IOException ex = null;<a name="line.190"></a>
+<span class="sourceLineNo">191</span>    try {<a name="line.191"></a>
+<span class="sourceLineNo">192</span>      region.close();<a name="line.192"></a>
+<span class="sourceLineNo">193</span>    } catch (IOException e) {<a name="line.193"></a>
+<span class="sourceLineNo">194</span>      LOG.warn("Caught Exception", e);<a name="line.194"></a>
+<span class="sourceLineNo">195</span>      ex = e;<a name="line.195"></a>
+<span class="sourceLineNo">196</span>    }<a name="line.196"></a>
+<span class="sourceLineNo">197</span>    try {<a name="line.197"></a>
+<span class="sourceLineNo">198</span>      walFactory.close();<a name="line.198"></a>
+<span class="sourceLineNo">199</span>    } catch (IOException e) {<a name="line.199"></a>
+<span class="sourceLineNo">200</span>      LOG.warn("Caught Exception", e);<a name="line.200"></a>
+<span class="sourceLineNo">201</span>      ex = e;<a name="line.201"></a>
+<span class="sourceLineNo">202</span>    }<a name="line.202"></a>
+<span class="sourceLineNo">203</span>    try {<a name="line.203"></a>
+<span class="sourceLineNo">204</span>      fs.delete(new Path(DIR), true);<a name="line.204"></a>
+<span class="sourceLineNo">205</span>    } catch (IOException e) {<a name="line.205"></a>
+<span class="sourceLineNo">206</span>      LOG.error("Could not delete " + DIR, e);<a name="line.206"></a>
+<span class="sourceLineNo">207</span>      ex = e;<a name="line.207"></a>
+<span class="sourceLineNo">208</span>    }<a name="line.208"></a>
+<span class="sourceLineNo">209</span>    if (ex != null) {<a name="line.209"></a>
+<span class="sourceLineNo">210</span>      throw ex;<a name="line.210"></a>
+<span class="sourceLineNo">211</span>    }<a name="line.211"></a>
+<span class="sourceLineNo">212</span>  }<a name="line.212"></a>
+<span class="sourceLineNo">213</span><a name="line.213"></a>
+<span class="sourceLineNo">214</span>  @Test<a name="line.214"></a>
+<span class="sourceLineNo">215</span>  public void testCacheOnWriteInSchema() throws IOException {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>    // Write some random data into the store<a name="line.216"></a>
+<span class="sourceLineNo">217</span>    StoreFileWriter writer = store.createWriterInTmp(Integer.MAX_VALUE,<a name="line.217"></a>
+<span class="sourceLineNo">218</span>        HFile.DEFAULT_COMPRESSION_ALGORITHM, false, true, false, false);<a name="line.218"></a>
+<span class="sourceLineNo">219</span>    writeStoreFile(writer);<a name="line.219"></a>
+<span class="sourceLineNo">220</span>    writer.close();<a name="line.220"></a>
+<span class="sourceLineNo">221</span>    // Verify the block types of interest were cached on write<a name="line.221"></a>
+<span class="sourceLineNo">222</span>    readStoreFile(writer.getPath());<a name="line.222"></a>
+<span class="sourceLineNo">223</span>  }<a name="line.223"></a>
+<span class="sourceLineNo">224</span><a name="line.224"></a>
+<span class="sourceLineNo">225</span>  private void readStoreFile(Path path) throws IOException {<a name="line.225"></a>
+<span class="sourceLineNo">226</span>    CacheConfig cacheConf = store.getCacheConfig();<a name="line.226"></a>
+<span class="sourceLineNo">227</span>    BlockCache cache = cacheConf.getBlockCache();<a name="line.227"></a>
+<span class="sourceLineNo">228</span>    HStoreFile sf = new HStoreFile(fs, path, conf, cacheConf, BloomType.ROWCOL, true);<a name="line.228"></a>
+<span class="sourceLineNo">229</span>    sf.initReader();<a name="line.229"></a>
+<span class="sourceLineNo">230</span>    HFile.Reader reader = sf.getReader().getHFileReader();<a name="line.230"></a>
+<span class="sourceLineNo">231</span>    try {<a name="line.231"></a>
+<span class="sourceLineNo">232</span>      // Open a scanner with (on read) caching disabled<a name="line.232"></a>
+<span class="sourceLineNo">233</span>      HFileScanner scanner = reader.getScanner(false, false);<a name="line.233"></a>
+<span class="sourceLineNo">234</span>      assertTrue(testDescription, scanner.seekTo());<a name="line.234"></a>
+<span class="sourceLineNo">235</span>      // Cribbed from io.hfile.TestCacheOnWrite<a name="line.235"></a>
+<span class="sourceLineNo">236</span>      long offset = 0;<a name="line.236"></a>
+<span class="sourceLineNo">237</span>      while (offset &lt; reader.getTrailer().getLoadOnOpenDataOffset()) {<a name="line.237"></a>
+<span class="sourceLineNo">238</span>        // Flags: don't cache the block, use pread, this is not a compaction.<a name="line.238"></a>
+<span class="sourceLineNo">239</span>        // Also, pass null for expected block type to avoid checking it.<a name="line.239"></a>
+<span class="sourceLineNo">240</span>        HFileBlock block = reader.readBlock(offset, -1, false, true,<a name="line.240"></a>
+<span class="sourceLineNo">241</span>          false, true, null, DataBlockEncoding.NONE);<a name="line.241"></a>
+<span class="sourceLineNo">242</span>        BlockCacheKey blockCacheKey = new BlockCacheKey(reader.getName(),<a name="line.242"></a>
+<span class="sourceLineNo">243</span>          offset);<a name="line.243"></a>
+<span class="sourceLineNo">244</span>        boolean isCached = cache.getBlock(blockCacheKey, true, false, true) != null;<a name="line.244"></a>
+<span class="sourceLineNo">245</span>        boolean shouldBeCached = cowType.shouldBeCached(block.getBlockType());<a name="line.245"></a>
+<span class="sourceLineNo">246</span>        if (shouldBeCached != isCached) {<a name="line.246"></a>
+<span class="sourceLineNo">247</span>          throw new AssertionError(<a name="line.247"></a>
+<span class="sourceLineNo">248</span>            "shouldBeCached: " + shouldBeCached+ "\n" +<a name="line.248"></a>
+<span class="sourceLineNo">249</span>            "isCached: " + isCached + "\n" +<a name="line.249"></a>
+<span class="sourceLineNo">250</span>            "Test description: " + testDescription + "\n" +<a name="line.250"></a>
+<span class="sourceLineNo">251</span>            "block: " + block + "\n" +<a name="line.251"></a>
+<span class="sourceLineNo">252</span>            "blockCacheKey: " + blockCacheKey);<a name="line.252"></a>
+<span class="sourceLineNo">253</span>        }<a name="line.253"></a>
+<span class="sourceLineNo">254</span>        offset += block.getOnDiskSizeWithHeader();<a name="line.254"></a>
+<span class="sourceLineNo">255</span>      }<a name="line.255"></a>
+<span class="sourceLineNo">256</span>    } finally {<a name="line.256"></a>
+<span class="sourceLineNo">257</span>      reader.close();<a name="line.257"></a>
+<span class="sourceLineNo">258</span>    }<a name="line.258"></a>
+<span class="sourceLineNo">259</span>  }<a name="line.259"></a>
+<span class="sourceLineNo">260</span><a name="line.260"></a>
+<span class="sourceLineNo">261</span>  private static KeyValue.Type generateKeyType(Random rand) {<a name="line.261"></a>
+<span class="sourceLineNo">262</span>    if (rand.nextBoolean()) {<a name="line.262"></a>
+<span class="sourceLineNo">263</span>      // Let's make half of KVs puts.<a name="line.263"></a>
+<span class="sourceLineNo">264</span>      return KeyValue.Type.Put;<a name="line.264"></a>
+<span class="sourceLineNo">265</span>    } else {<a name="line.265"></a>
+<span class="sourceLineNo">266</span>      KeyValue.Type keyType =<a name="line.266"></a>
+<span class="sourceLineNo">267</span>          KeyValue.Type.values()[1 + rand.nextInt(NUM_VALID_KEY_TYPES)];<a name="line.267"></a>
+<span class="sourceLineNo">268</span>      if (keyType == KeyValue.Type.Minimum || keyType == KeyValue.Type.Maximum)<a name="line.268"></a>
+<span class="sourceLineNo">269</span>      {<a name="line.269"></a>
+<span class="sourceLineNo">270</span>        throw new RuntimeException("Generated an invalid key type: " + keyType<a name="line.270"></a>
+<span class="sourceLineNo">271</span>            + ". " + "Probably the layout of KeyValue.Type has changed.");<a name="line.271"></a>
+<span class="sourceLineNo">272</span>      }<a name="line.272"></a>
+<span class="sourceLineNo">273</span>      return keyType;<a name="line.273"></a>
+<span class="sourceLineNo">274</span>    }<a name="line.274"></a>
+<span class="sourceLineNo">275</span>  }<a name="line.275"></a>
+<span class="sourceLineNo">276</span><a name="line.276"></a>
+<span class="sourceLineNo">277</span>  private void writeStoreFile(StoreFileWriter writer) throws IOException {<a name="line.277"></a>
+<span class="sourceLineNo">278</span>    final int rowLen = 32;<a name="line.278"></a>
+<span class="sourceLineNo">279</span>    for (int i = 0; i &lt; NUM_KV; ++i) {<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      byte[] k = RandomKeyValueUtil.randomOrderedKey(rand, i);<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      byte[] v = RandomKeyValueUtil.randomValue(rand);<a name="line.281"></a>
+<span class="sourceLineNo">282</span>      int cfLen = rand.nextInt(k.length - rowLen + 1);<a name="line.282"></a>
+<span class="sourceLineNo">283</span>      KeyValue kv = new KeyValue(<a name="line.283"></a>
+<span class="sourceLineNo">284</span>          k, 0, rowLen,<a name="line.284"></a>
+<span class="sourceLineNo">285</span>          k, rowLen, cfLen,<a name="line.285"></a>
+<span class="sourceLineNo">286</span>          k, rowLen + cfLen, k.length - rowLen - cfLen,<a name="line.286"></a>
+<span class="sourceLineNo">287</span>          rand.nextLong(),<a name="line.287"></a>
+<span class="sourceLineNo">288</span>          generateKeyType(rand),<a name="line.288"></a>
+<span class="sourceLineNo">289</span>          v, 0, v.length);<a name="line.289"></a>
+<span class="sourceLineNo">290</span>      writer.append(kv);<a name="line.290"></a>
+<span class="sourceLineNo">291</span>    }<a name="line.291"></a>
+<span class="sourceLineNo">292</span>  }<a name="line.292"></a>
+<span class="sourceLineNo">293</span><a name="line.293"></a>
+<span class="sourceLineNo">294</span>}<a name="line.294"></a>
+<span class="sourceLineNo">295</span><a name="line.295"></a>
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.html
----------------------------------------------------------------------
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.html
index 29963fb..3494514 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.html
@@ -169,137 +169,138 @@
 <span class="sourceLineNo">161</span>    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, false);<a name="line.161"></a>
 <span class="sourceLineNo">162</span>    conf.setBoolean(CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY, false);<a name="line.162"></a>
 <span class="sourceLineNo">163</span>    conf.setBoolean(CacheConfig.CACHE_BLOOM_BLOCKS_ON_WRITE_KEY, false);<a name="line.163"></a>
-<span class="sourceLineNo">164</span><a name="line.164"></a>
-<span class="sourceLineNo">165</span>    fs = HFileSystem.get(conf);<a name="line.165"></a>
-<span class="sourceLineNo">166</span><a name="line.166"></a>
-<span class="sourceLineNo">167</span>    // Create the schema<a name="line.167"></a>
-<span class="sourceLineNo">168</span>    ColumnFamilyDescriptor hcd = cowType<a name="line.168"></a>
-<span class="sourceLineNo">169</span>        .modifyFamilySchema(<a name="line.169"></a>
-<span class="sourceLineNo">170</span>          ColumnFamilyDescriptorBuilder.newBuilder(family).setBloomFilterType(BloomType.ROWCOL))<a name="line.170"></a>
-<span class="sourceLineNo">171</span>        .build();<a name="line.171"></a>
-<span class="sourceLineNo">172</span>    TableDescriptor htd =<a name="line.172"></a>
-<span class="sourceLineNo">173</span>        TableDescriptorBuilder.newBuilder(TableName.valueOf(table)).setColumnFamily(hcd).build();<a name="line.173"></a>
-<span class="sourceLineNo">174</span><a name="line.174"></a>
-<span class="sourceLineNo">175</span>    // Create a store based on the schema<a name="line.175"></a>
-<span class="sourceLineNo">176</span>    String id = TestCacheOnWriteInSchema.class.getName();<a name="line.176"></a>
-<span class="sourceLineNo">177</span>    Path logdir = new Path(FSUtils.getRootDir(conf), AbstractFSWALProvider.getWALDirectoryName(id));<a name="line.177"></a>
-<span class="sourceLineNo">178</span>    fs.delete(logdir, true);<a name="line.178"></a>
-<span class="sourceLineNo">179</span><a name="line.179"></a>
-<span class="sourceLineNo">180</span>    RegionInfo info = RegionInfoBuilder.newBuilder(htd.getTableName()).build();<a name="line.180"></a>
-<span class="sourceLineNo">181</span>    walFactory = new WALFactory(conf, id);<a name="line.181"></a>
-<span class="sourceLineNo">182</span><a name="line.182"></a>
-<span class="sourceLineNo">183</span>    region = TEST_UTIL.createLocalHRegion(info, htd, walFactory.getWAL(info));<a name="line.183"></a>
-<span class="sourceLineNo">184</span>    store = new HStore(region, hcd, conf);<a name="line.184"></a>
-<span class="sourceLineNo">185</span>  }<a name="line.185"></a>
-<span class="sourceLineNo">186</span><a name="line.186"></a>
-<span class="sourceLineNo">187</span>  @After<a name="line.187"></a>
-<span class="sourceLineNo">188</span>  public void tearDown() throws IOException {<a name="line.188"></a>
-<span class="sourceLineNo">189</span>    IOException ex = null;<a name="line.189"></a>
-<span class="sourceLineNo">190</span>    try {<a name="line.190"></a>
-<span class="sourceLineNo">191</span>      region.close();<a name="line.191"></a>
-<span class="sourceLineNo">192</span>    } catch (IOException e) {<a name="line.192"></a>
-<span class="sourceLineNo">193</span>      LOG.warn("Caught Exception", e);<a name="line.193"></a>
-<span class="sourceLineNo">194</span>      ex = e;<a name="line.194"></a>
-<span class="sourceLineNo">195</span>    }<a name="line.195"></a>
-<span class="sourceLineNo">196</span>    try {<a name="line.196"></a>
-<span class="sourceLineNo">197</span>      walFactory.close();<a name="line.197"></a>
-<span class="sourceLineNo">198</span>    } catch (IOException e) {<a name="line.198"></a>
-<span class="sourceLineNo">199</span>      LOG.warn("Caught Exception", e);<a name="line.199"></a>
-<span class="sourceLineNo">200</span>      ex = e;<a name="line.200"></a>
-<span class="sourceLineNo">201</span>    }<a name="line.201"></a>
-<span class="sourceLineNo">202</span>    try {<a name="line.202"></a>
-<span class="sourceLineNo">203</span>      fs.delete(new Path(DIR), true);<a name="line.203"></a>
-<span class="sourceLineNo">204</span>    } catch (IOException e) {<a name="line.204"></a>
-<span class="sourceLineNo">205</span>      LOG.error("Could not delete " + DIR, e);<a name="line.205"></a>
-<span class="sourceLineNo">206</span>      ex = e;<a name="line.206"></a>
-<span class="sourceLineNo">207</span>    }<a name="line.207"></a>
-<span class="sourceLineNo">208</span>    if (ex != null) {<a name="line.208"></a>
-<span class="sourceLineNo">209</span>      throw ex;<a name="line.209"></a>
-<span class="sourceLineNo">210</span>    }<a name="line.210"></a>
-<span class="sourceLineNo">211</span>  }<a name="line.211"></a>
-<span class="sourceLineNo">212</span><a name="line.212"></a>
-<span class="sourceLineNo">213</span>  @Test<a name="line.213"></a>
-<span class="sourceLineNo">214</span>  public void testCacheOnWriteInSchema() throws IOException {<a name="line.214"></a>
-<span class="sourceLineNo">215</span>    // Write some random data into the store<a name="line.215"></a>
-<span class="sourceLineNo">216</span>    StoreFileWriter writer = store.createWriterInTmp(Integer.MAX_VALUE,<a name="line.216"></a>
-<span class="sourceLineNo">217</span>        HFile.DEFAULT_COMPRESSION_ALGORITHM, false, true, false, false);<a name="line.217"></a>
-<span class="sourceLineNo">218</span>    writeStoreFile(writer);<a name="line.218"></a>
-<span class="sourceLineNo">219</span>    writer.close();<a name="line.219"></a>
-<span class="sourceLineNo">220</span>    // Verify the block types of interest were cached on write<a name="line.220"></a>
-<span class="sourceLineNo">221</span>    readStoreFile(writer.getPath());<a name="line.221"></a>
-<span class="sourceLineNo">222</span>  }<a name="line.222"></a>
-<span class="sourceLineNo">223</span><a name="line.223"></a>
-<span class="sourceLineNo">224</span>  private void readStoreFile(Path path) throws IOException {<a name="line.224"></a>
-<span class="sourceLineNo">225</span>    CacheConfig cacheConf = store.getCacheConfig();<a name="line.225"></a>
-<span class="sourceLineNo">226</span>    BlockCache cache = cacheConf.getBlockCache();<a name="line.226"></a>
-<span class="sourceLineNo">227</span>    HStoreFile sf = new HStoreFile(fs, path, conf, cacheConf, BloomType.ROWCOL, true);<a name="line.227"></a>
-<span class="sourceLineNo">228</span>    sf.initReader();<a name="line.228"></a>
-<span class="sourceLineNo">229</span>    HFile.Reader reader = sf.getReader().getHFileReader();<a name="line.229"></a>
-<span class="sourceLineNo">230</span>    try {<a name="line.230"></a>
-<span class="sourceLineNo">231</span>      // Open a scanner with (on read) caching disabled<a name="line.231"></a>
-<span class="sourceLineNo">232</span>      HFileScanner scanner = reader.getScanner(false, false);<a name="line.232"></a>
-<span class="sourceLineNo">233</span>      assertTrue(testDescription, scanner.seekTo());<a name="line.233"></a>
-<span class="sourceLineNo">234</span>      // Cribbed from io.hfile.TestCacheOnWrite<a name="line.234"></a>
-<span class="sourceLineNo">235</span>      long offset = 0;<a name="line.235"></a>
-<span class="sourceLineNo">236</span>      while (offset &lt; reader.getTrailer().getLoadOnOpenDataOffset()) {<a name="line.236"></a>
-<span class="sourceLineNo">237</span>        // Flags: don't cache the block, use pread, this is not a compaction.<a name="line.237"></a>
-<span class="sourceLineNo">238</span>        // Also, pass null for expected block type to avoid checking it.<a name="line.238"></a>
-<span class="sourceLineNo">239</span>        HFileBlock block = reader.readBlock(offset, -1, false, true,<a name="line.239"></a>
-<span class="sourceLineNo">240</span>          false, true, null, DataBlockEncoding.NONE);<a name="line.240"></a>
-<span class="sourceLineNo">241</span>        BlockCacheKey blockCacheKey = new BlockCacheKey(reader.getName(),<a name="line.241"></a>
-<span class="sourceLineNo">242</span>          offset);<a name="line.242"></a>
-<span class="sourceLineNo">243</span>        boolean isCached = cache.getBlock(blockCacheKey, true, false, true) != null;<a name="line.243"></a>
-<span class="sourceLineNo">244</span>        boolean shouldBeCached = cowType.shouldBeCached(block.getBlockType());<a name="line.244"></a>
-<span class="sourceLineNo">245</span>        if (shouldBeCached != isCached) {<a name="line.245"></a>
-<span class="sourceLineNo">246</span>          throw new AssertionError(<a name="line.246"></a>
-<span class="sourceLineNo">247</span>            "shouldBeCached: " + shouldBeCached+ "\n" +<a name="line.247"></a>
-<span class="sourceLineNo">248</span>            "isCached: " + isCached + "\n" +<a name="line.248"></a>
-<span class="sourceLineNo">249</span>            "Test description: " + testDescription + "\n" +<a name="line.249"></a>
-<span class="sourceLineNo">250</span>            "block: " + block + "\n" +<a name="line.250"></a>
-<span class="sourceLineNo">251</span>            "blockCacheKey: " + blockCacheKey);<a name="line.251"></a>
-<span class="sourceLineNo">252</span>        }<a name="line.252"></a>
-<span class="sourceLineNo">253</span>        offset += block.getOnDiskSizeWithHeader();<a name="line.253"></a>
-<span class="sourceLineNo">254</span>      }<a name="line.254"></a>
-<span class="sourceLineNo">255</span>    } finally {<a name="line.255"></a>
-<span class="sourceLineNo">256</span>      reader.close();<a name="line.256"></a>
-<span class="sourceLineNo">257</span>    }<a name="line.257"></a>
-<span class="sourceLineNo">258</span>  }<a name="line.258"></a>
-<span class="sourceLineNo">259</span><a name="line.259"></a>
-<span class="sourceLineNo">260</span>  private static KeyValue.Type generateKeyType(Random rand) {<a name="line.260"></a>
-<span class="sourceLineNo">261</span>    if (rand.nextBoolean()) {<a name="line.261"></a>
-<span class="sourceLineNo">262</span>      // Let's make half of KVs puts.<a name="line.262"></a>
-<span class="sourceLineNo">263</span>      return KeyValue.Type.Put;<a name="line.263"></a>
-<span class="sourceLineNo">264</span>    } else {<a name="line.264"></a>
-<span class="sourceLineNo">265</span>      KeyValue.Type keyType =<a name="line.265"></a>
-<span class="sourceLineNo">266</span>          KeyValue.Type.values()[1 + rand.nextInt(NUM_VALID_KEY_TYPES)];<a name="line.266"></a>
-<span class="sourceLineNo">267</span>      if (keyType == KeyValue.Type.Minimum || keyType == KeyValue.Type.Maximum)<a name="line.267"></a>
-<span class="sourceLineNo">268</span>      {<a name="line.268"></a>
-<span class="sourceLineNo">269</span>        throw new RuntimeException("Generated an invalid key type: " + keyType<a name="line.269"></a>
-<span class="sourceLineNo">270</span>            + ". " + "Probably the layout of KeyValue.Type has changed.");<a name="line.270"></a>
-<span class="sourceLineNo">271</span>      }<a name="line.271"></a>
-<span class="sourceLineNo">272</span>      return keyType;<a name="line.272"></a>
-<span class="sourceLineNo">273</span>    }<a name="line.273"></a>
-<span class="sourceLineNo">274</span>  }<a name="line.274"></a>
-<span class="sourceLineNo">275</span><a name="line.275"></a>
-<span class="sourceLineNo">276</span>  private void writeStoreFile(StoreFileWriter writer) throws IOException {<a name="line.276"></a>
-<span class="sourceLineNo">277</span>    final int rowLen = 32;<a name="line.277"></a>
-<span class="sourceLineNo">278</span>    for (int i = 0; i &lt; NUM_KV; ++i) {<a name="line.278"></a>
-<span class="sourceLineNo">279</span>      byte[] k = RandomKeyValueUtil.randomOrderedKey(rand, i);<a name="line.279"></a>
-<span class="sourceLineNo">280</span>      byte[] v = RandomKeyValueUtil.randomValue(rand);<a name="line.280"></a>
-<span class="sourceLineNo">281</span>      int cfLen = rand.nextInt(k.length - rowLen + 1);<a name="line.281"></a>
-<span class="sourceLineNo">282</span>      KeyValue kv = new KeyValue(<a name="line.282"></a>
-<span class="sourceLineNo">283</span>          k, 0, rowLen,<a name="line.283"></a>
-<span class="sourceLineNo">284</span>          k, rowLen, cfLen,<a name="line.284"></a>
-<span class="sourceLineNo">285</span>          k, rowLen + cfLen, k.length - rowLen - cfLen,<a name="line.285"></a>
-<span class="sourceLineNo">286</span>          rand.nextLong(),<a name="line.286"></a>
-<span class="sourceLineNo">287</span>          generateKeyType(rand),<a name="line.287"></a>
-<span class="sourceLineNo">288</span>          v, 0, v.length);<a name="line.288"></a>
-<span class="sourceLineNo">289</span>      writer.append(kv);<a name="line.289"></a>
-<span class="sourceLineNo">290</span>    }<a name="line.290"></a>
-<span class="sourceLineNo">291</span>  }<a name="line.291"></a>
-<span class="sourceLineNo">292</span><a name="line.292"></a>
-<span class="sourceLineNo">293</span>}<a name="line.293"></a>
-<span class="sourceLineNo">294</span><a name="line.294"></a>
+<span class="sourceLineNo">164</span>    CacheConfig.instantiateBlockCache(conf);<a name="line.164"></a>
+<span class="sourceLineNo">165</span><a name="line.165"></a>
+<span class="sourceLineNo">166</span>    fs = HFileSystem.get(conf);<a name="line.166"></a>
+<span class="sourceLineNo">167</span><a name="line.167"></a>
+<span class="sourceLineNo">168</span>    // Create the schema<a name="line.168"></a>
+<span class="sourceLineNo">169</span>    ColumnFamilyDescriptor hcd = cowType<a name="line.169"></a>
+<span class="sourceLineNo">170</span>        .modifyFamilySchema(<a name="line.170"></a>
+<span class="sourceLineNo">171</span>          ColumnFamilyDescriptorBuilder.newBuilder(family).setBloomFilterType(BloomType.ROWCOL))<a name="line.171"></a>
+<span class="sourceLineNo">172</span>        .build();<a name="line.172"></a>
+<span class="sourceLineNo">173</span>    TableDescriptor htd =<a name="line.173"></a>
+<span class="sourceLineNo">174</span>        TableDescriptorBuilder.newBuilder(TableName.valueOf(table)).setColumnFamily(hcd).build();<a name="line.174"></a>
+<span class="sourceLineNo">175</span><a name="line.175"></a>
+<span class="sourceLineNo">176</span>    // Create a store based on the schema<a name="line.176"></a>
+<span class="sourceLineNo">177</span>    String id = TestCacheOnWriteInSchema.class.getName();<a name="line.177"></a>
+<span class="sourceLineNo">178</span>    Path logdir = new Path(FSUtils.getRootDir(conf), AbstractFSWALProvider.getWALDirectoryName(id));<a name="line.178"></a>
+<span class="sourceLineNo">179</span>    fs.delete(logdir, true);<a name="line.179"></a>
+<span class="sourceLineNo">180</span><a name="line.180"></a>
+<span class="sourceLineNo">181</span>    RegionInfo info = RegionInfoBuilder.newBuilder(htd.getTableName()).build();<a name="line.181"></a>
+<span class="sourceLineNo">182</span>    walFactory = new WALFactory(conf, id);<a name="line.182"></a>
+<span class="sourceLineNo">183</span><a name="line.183"></a>
+<span class="sourceLineNo">184</span>    region = TEST_UTIL.createLocalHRegion(info, htd, walFactory.getWAL(info));<a name="line.184"></a>
+<span class="sourceLineNo">185</span>    store = new HStore(region, hcd, conf);<a name="line.185"></a>
+<span class="sourceLineNo">186</span>  }<a name="line.186"></a>
+<span class="sourceLineNo">187</span><a name="line.187"></a>
+<span class="sourceLineNo">188</span>  @After<a name="line.188"></a>
+<span class="sourceLineNo">189</span>  public void tearDown() throws IOException {<a name="line.189"></a>
+<span class="sourceLineNo">190</span>    IOException ex = null;<a name="line.190"></a>
+<span class="sourceLineNo">191</span>    try {<a name="line.191"></a>
+<span class="sourceLineNo">192</span>      region.close();<a name="line.192"></a>
+<span class="sourceLineNo">193</span>    } catch (IOException e) {<a name="line.193"></a>
+<span class="sourceLineNo">194</span>      LOG.warn("Caught Exception", e);<a name="line.194"></a>
+<span class="sourceLineNo">195</span>      ex = e;<a name="line.195"></a>
+<span class="sourceLineNo">196</span>    }<a name="line.196"></a>
+<span class="sourceLineNo">197</span>    try {<a name="line.197"></a>
+<span class="sourceLineNo">198</span>      walFactory.close();<a name="line.198"></a>
+<span class="sourceLineNo">199</span>    } catch (IOException e) {<a name="line.199"></a>
+<span class="sourceLineNo">200</span>      LOG.warn("Caught Exception", e);<a name="line.200"></a>
+<span class="sourceLineNo">201</span>      ex = e;<a name="line.201"></a>
+<span class="sourceLineNo">202</span>    }<a name="line.202"></a>
+<span class="sourceLineNo">203</span>    try {<a name="line.203"></a>
+<span class="sourceLineNo">204</span>      fs.delete(new Path(DIR), true);<a name="line.204"></a>
+<span class="sourceLineNo">205</span>    } catch (IOException e) {<a name="line.205"></a>
+<span class="sourceLineNo">206</span>      LOG.error("Could not delete " + DIR, e);<a name="line.206"></a>
+<span class="sourceLineNo">207</span>      ex = e;<a name="line.207"></a>
+<span class="sourceLineNo">208</span>    }<a name="line.208"></a>
+<span class="sourceLineNo">209</span>    if (ex != null) {<a name="line.209"></a>
+<span class="sourceLineNo">210</span>      throw ex;<a name="line.210"></a>
+<span class="sourceLineNo">211</span>    }<a name="line.211"></a>
+<span class="sourceLineNo">212</span>  }<a name="line.212"></a>
+<span class="sourceLineNo">213</span><a name="line.213"></a>
+<span class="sourceLineNo">214</span>  @Test<a name="line.214"></a>
+<span class="sourceLineNo">215</span>  public void testCacheOnWriteInSchema() throws IOException {<a name="line.215"></a>
+<span class="sourceLineNo">216</span>    // Write some random data into the store<a name="line.216"></a>
+<span class="sourceLineNo">217</span>    StoreFileWriter writer = store.createWriterInTmp(Integer.MAX_VALUE,<a name="line.217"></a>
+<span class="sourceLineNo">218</span>        HFile.DEFAULT_COMPRESSION_ALGORITHM, false, true, false, false);<a name="line.218"></a>
+<span class="sourceLineNo">219</span>    writeStoreFile(writer);<a name="line.219"></a>
+<span class="sourceLineNo">220</span>    writer.close();<a name="line.220"></a>
+<span class="sourceLineNo">221</span>    // Verify the block types of interest were cached on write<a name="line.221"></a>
+<span class="sourceLineNo">222</span>    readStoreFile(writer.getPath());<a name="line.222"></a>
+<span class="sourceLineNo">223</span>  }<a name="line.223"></a>
+<span class="sourceLineNo">224</span><a name="line.224"></a>
+<span class="sourceLineNo">225</span>  private void readStoreFile(Path path) throws IOException {<a name="line.225"></a>
+<span class="sourceLineNo">226</span>    CacheConfig cacheConf = store.getCacheConfig();<a name="line.226"></a>
+<span class="sourceLineNo">227</span>    BlockCache cache = cacheConf.getBlockCache();<a name="line.227"></a>
+<span class="sourceLineNo">228</span>    HStoreFile sf = new HStoreFile(fs, path, conf, cacheConf, BloomType.ROWCOL, true);<a name="line.228"></a>
+<span class="sourceLineNo">229</span>    sf.initReader();<a name="line.229"></a>
+<span class="sourceLineNo">230</span>    HFile.Reader reader = sf.getReader().getHFileReader();<a name="line.230"></a>
+<span class="sourceLineNo">231</span>    try {<a name="line.231"></a>
+<span class="sourceLineNo">232</span>      // Open a scanner with (on read) caching disabled<a name="line.232"></a>
+<span class="sourceLineNo">233</span>      HFileScanner scanner = reader.getScanner(false, false);<a name="line.233"></a>
+<span class="sourceLineNo">234</span>      assertTrue(testDescription, scanner.seekTo());<a name="line.234"></a>
+<span class="sourceLineNo">235</span>      // Cribbed from io.hfile.TestCacheOnWrite<a name="line.235"></a>
+<span class="sourceLineNo">236</span>      long offset = 0;<a name="line.236"></a>
+<span class="sourceLineNo">237</span>      while (offset &lt; reader.getTrailer().getLoadOnOpenDataOffset()) {<a name="line.237"></a>
+<span class="sourceLineNo">238</span>        // Flags: don't cache the block, use pread, this is not a compaction.<a name="line.238"></a>
+<span class="sourceLineNo">239</span>        // Also, pass null for expected block type to avoid checking it.<a name="line.239"></a>
+<span class="sourceLineNo">240</span>        HFileBlock block = reader.readBlock(offset, -1, false, true,<a name="line.240"></a>
+<span class="sourceLineNo">241</span>          false, true, null, DataBlockEncoding.NONE);<a name="line.241"></a>
+<span class="sourceLineNo">242</span>        BlockCacheKey blockCacheKey = new BlockCacheKey(reader.getName(),<a name="line.242"></a>
+<span class="sourceLineNo">243</span>          offset);<a name="line.243"></a>
+<span class="sourceLineNo">244</span>        boolean isCached = cache.getBlock(blockCacheKey, true, false, true) != null;<a name="line.244"></a>
+<span class="sourceLineNo">245</span>        boolean shouldBeCached = cowType.shouldBeCached(block.getBlockType());<a name="line.245"></a>
+<span class="sourceLineNo">246</span>        if (shouldBeCached != isCached) {<a name="line.246"></a>
+<span class="sourceLineNo">247</span>          throw new AssertionError(<a name="line.247"></a>
+<span class="sourceLineNo">248</span>            "shouldBeCached: " + shouldBeCached+ "\n" +<a name="line.248"></a>
+<span class="sourceLineNo">249</span>            "isCached: " + isCached + "\n" +<a name="line.249"></a>
+<span class="sourceLineNo">250</span>            "Test description: " + testDescription + "\n" +<a name="line.250"></a>
+<span class="sourceLineNo">251</span>            "block: " + block + "\n" +<a name="line.251"></a>
+<span class="sourceLineNo">252</span>            "blockCacheKey: " + blockCacheKey);<a name="line.252"></a>
+<span class="sourceLineNo">253</span>        }<a name="line.253"></a>
+<span class="sourceLineNo">254</span>        offset += block.getOnDiskSizeWithHeader();<a name="line.254"></a>
+<span class="sourceLineNo">255</span>      }<a name="line.255"></a>
+<span class="sourceLineNo">256</span>    } finally {<a name="line.256"></a>
+<span class="sourceLineNo">257</span>      reader.close();<a name="line.257"></a>
+<span class="sourceLineNo">258</span>    }<a name="line.258"></a>
+<span class="sourceLineNo">259</span>  }<a name="line.259"></a>
+<span class="sourceLineNo">260</span><a name="line.260"></a>
+<span class="sourceLineNo">261</span>  private static KeyValue.Type generateKeyType(Random rand) {<a name="line.261"></a>
+<span class="sourceLineNo">262</span>    if (rand.nextBoolean()) {<a name="line.262"></a>
+<span class="sourceLineNo">263</span>      // Let's make half of KVs puts.<a name="line.263"></a>
+<span class="sourceLineNo">264</span>      return KeyValue.Type.Put;<a name="line.264"></a>
+<span class="sourceLineNo">265</span>    } else {<a name="line.265"></a>
+<span class="sourceLineNo">266</span>      KeyValue.Type keyType =<a name="line.266"></a>
+<span class="sourceLineNo">267</span>          KeyValue.Type.values()[1 + rand.nextInt(NUM_VALID_KEY_TYPES)];<a name="line.267"></a>
+<span class="sourceLineNo">268</span>      if (keyType == KeyValue.Type.Minimum || keyType == KeyValue.Type.Maximum)<a name="line.268"></a>
+<span class="sourceLineNo">269</span>      {<a name="line.269"></a>
+<span class="sourceLineNo">270</span>        throw new RuntimeException("Generated an invalid key type: " + keyType<a name="line.270"></a>
+<span class="sourceLineNo">271</span>            + ". " + "Probably the layout of KeyValue.Type has changed.");<a name="line.271"></a>
+<span class="sourceLineNo">272</span>      }<a name="line.272"></a>
+<span class="sourceLineNo">273</span>      return keyType;<a name="line.273"></a>
+<span class="sourceLineNo">274</span>    }<a name="line.274"></a>
+<span class="sourceLineNo">275</span>  }<a name="line.275"></a>
+<span class="sourceLineNo">276</span><a name="line.276"></a>
+<span class="sourceLineNo">277</span>  private void writeStoreFile(StoreFileWriter writer) throws IOException {<a name="line.277"></a>
+<span class="sourceLineNo">278</span>    final int rowLen = 32;<a name="line.278"></a>
+<span class="sourceLineNo">279</span>    for (int i = 0; i &lt; NUM_KV; ++i) {<a name="line.279"></a>
+<span class="sourceLineNo">280</span>      byte[] k = RandomKeyValueUtil.randomOrderedKey(rand, i);<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      byte[] v = RandomKeyValueUtil.randomValue(rand);<a name="line.281"></a>
+<span class="sourceLineNo">282</span>      int cfLen = rand.nextInt(k.length - rowLen + 1);<a name="line.282"></a>
+<span class="sourceLineNo">283</span>      KeyValue kv = new KeyValue(<a name="line.283"></a>
+<span class="sourceLineNo">284</span>          k, 0, rowLen,<a name="line.284"></a>
+<span class="sourceLineNo">285</span>          k, rowLen, cfLen,<a name="line.285"></a>
+<span class="sourceLineNo">286</span>          k, rowLen + cfLen, k.length - rowLen - cfLen,<a name="line.286"></a>
+<span class="sourceLineNo">287</span>          rand.nextLong(),<a name="line.287"></a>
+<span class="sourceLineNo">288</span>          generateKeyType(rand),<a name="line.288"></a>
+<span class="sourceLineNo">289</span>          v, 0, v.length);<a name="line.289"></a>
+<span class="sourceLineNo">290</span>      writer.append(kv);<a name="line.290"></a>
+<span class="sourceLineNo">291</span>    }<a name="line.291"></a>
+<span class="sourceLineNo">292</span>  }<a name="line.292"></a>
+<span class="sourceLineNo">293</span><a name="line.293"></a>
+<span class="sourceLineNo">294</span>}<a name="line.294"></a>
+<span class="sourceLineNo">295</span><a name="line.295"></a>
 
 
 


[21/26] hbase-site git commit: Published site at 6f15cecaed2f1f76bfe1880b7c578ed369daa5d5.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
index 42af249..ead8bd1 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
@@ -716,20 +716,20 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ChunkCreator.ChunkType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ChunkCreator.ChunkType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">MetricsRegionServerSourceFactoryImpl.FactoryStorage</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/CompactingMemStore.IndexType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">CompactingMemStore.IndexType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/FlushType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">FlushType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScanType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/BloomType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">BloomType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.LimitScope.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScannerContext.LimitScope</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">HRegion.FlushResult.Result</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">SplitLogWorker.TaskExecutor.Status</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.NextState.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScannerContext.NextState</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TimeRangeTracker.Type.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TimeRangeTracker.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/DefaultHeapMemoryTuner.StepDirection.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">DefaultHeapMemoryTuner.StepDirection</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/CompactingMemStore.IndexType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">CompactingMemStore.IndexType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/TimeRangeTracker.Type.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">TimeRangeTracker.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/Region.Operation.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">Region.Operation</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.Result.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">HRegion.FlushResult.Result</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ChunkCreator.ChunkType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ChunkCreator.ChunkType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/SplitLogWorker.TaskExecutor.Status.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">SplitLogWorker.TaskExecutor.Status</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">MetricsRegionServerSourceFactoryImpl.FactoryStorage</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/MemStoreCompactionStrategy.Action.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">MemStoreCompactionStrategy.Action</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/BloomType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">BloomType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScanType.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScanType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.NextState.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScannerContext.NextState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.<a href="../../../../../org/apache/hadoop/hbase/regionserver/ScannerContext.LimitScope.html" title="enum in org.apache.hadoop.hbase.regionserver"><span class="typeNameLink">ScannerContext.LimitScope</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html b/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
index 6851ee1..23060c2 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
@@ -130,9 +130,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
+<li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/StripeCompactionScanQueryMatcher.DropDeletesInOutput.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">StripeCompactionScanQueryMatcher.DropDeletesInOutput</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/DeleteTracker.DeleteResult.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">DeleteTracker.DeleteResult</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/ScanQueryMatcher.MatchCode.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">ScanQueryMatcher.MatchCode</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.querymatcher.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/querymatcher/StripeCompactionScanQueryMatcher.DropDeletesInOutput.html" title="enum in org.apache.hadoop.hbase.regionserver.querymatcher"><span class="typeNameLink">StripeCompactionScanQueryMatcher.DropDeletesInOutput</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
index 46651a5..7eedc5c 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
@@ -248,8 +248,8 @@
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
 <li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.WALHdrResult.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">ProtobufLogReader.WALHdrResult</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/CompressionContext.DictionaryIndex.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">CompressionContext.DictionaryIndex</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/RingBufferTruck.Type.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">RingBufferTruck.Type</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.regionserver.wal.<a href="../../../../../../org/apache/hadoop/hbase/regionserver/wal/CompressionContext.DictionaryIndex.html" title="enum in org.apache.hadoop.hbase.regionserver.wal"><span class="typeNameLink">CompressionContext.DictionaryIndex</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/org/apache/hadoop/hbase/replication/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/replication/package-tree.html b/devapidocs/org/apache/hadoop/hbase/replication/package-tree.html
index 3c7146a..f8e4b11 100644
--- a/devapidocs/org/apache/hadoop/hbase/replication/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/replication/package-tree.html
@@ -160,8 +160,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.replication.<a href="../../../../../org/apache/hadoop/hbase/replication/ReplicationPeer.PeerState.html" title="enum in org.apache.hadoop.hbase.replication"><span class="typeNameLink">ReplicationPeer.PeerState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.replication.<a href="../../../../../org/apache/hadoop/hbase/replication/SyncReplicationState.html" title="enum in org.apache.hadoop.hbase.replication"><span class="typeNameLink">SyncReplicationState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.replication.<a href="../../../../../org/apache/hadoop/hbase/replication/ReplicationPeer.PeerState.html" title="enum in org.apache.hadoop.hbase.replication"><span class="typeNameLink">ReplicationPeer.PeerState</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/org/apache/hadoop/hbase/replication/regionserver/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/package-tree.html b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/package-tree.html
index 30c4e73..cd3870f 100644
--- a/devapidocs/org/apache/hadoop/hbase/replication/regionserver/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/replication/regionserver/package-tree.html
@@ -207,8 +207,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.replication.regionserver.<a href="../../../../../../org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceFactoryImpl.SourceHolder.html" title="enum in org.apache.hadoop.hbase.replication.regionserver"><span class="typeNameLink">MetricsReplicationSourceFactoryImpl.SourceHolder</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.replication.regionserver.<a href="../../../../../../org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceShipper.WorkerState.html" title="enum in org.apache.hadoop.hbase.replication.regionserver"><span class="typeNameLink">ReplicationSourceShipper.WorkerState</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.replication.regionserver.<a href="../../../../../../org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceFactoryImpl.SourceHolder.html" title="enum in org.apache.hadoop.hbase.replication.regionserver"><span class="typeNameLink">MetricsReplicationSourceFactoryImpl.SourceHolder</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/org/apache/hadoop/hbase/rest/model/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/rest/model/package-tree.html b/devapidocs/org/apache/hadoop/hbase/rest/model/package-tree.html
index c20ff47..034077c 100644
--- a/devapidocs/org/apache/hadoop/hbase/rest/model/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/rest/model/package-tree.html
@@ -110,8 +110,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.rest.model.<a href="../../../../../../org/apache/hadoop/hbase/rest/model/ScannerModel.FilterModel.FilterType.html" title="enum in org.apache.hadoop.hbase.rest.model"><span class="typeNameLink">ScannerModel.FilterModel.FilterType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.rest.model.<a href="../../../../../../org/apache/hadoop/hbase/rest/model/ScannerModel.FilterModel.ByteArrayComparableModel.ComparatorType.html" title="enum in org.apache.hadoop.hbase.rest.model"><span class="typeNameLink">ScannerModel.FilterModel.ByteArrayComparableModel.ComparatorType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.rest.model.<a href="../../../../../../org/apache/hadoop/hbase/rest/model/ScannerModel.FilterModel.FilterType.html" title="enum in org.apache.hadoop.hbase.rest.model"><span class="typeNameLink">ScannerModel.FilterModel.FilterType</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html b/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
index a7f8883..f56fa93 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/access/package-tree.html
@@ -141,8 +141,8 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/Permission.Action.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">Permission.Action</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/Permission.Scope.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">Permission.Scope</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/Permission.Action.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">Permission.Action</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/AccessControlFilter.Strategy.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">AccessControlFilter.Strategy</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.access.<a href="../../../../../../org/apache/hadoop/hbase/security/access/AccessController.OpType.html" title="enum in org.apache.hadoop.hbase.security.access"><span class="typeNameLink">AccessController.OpType</span></a></li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/security/package-tree.html b/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
index 7ba3a64..6ef281d 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/package-tree.html
@@ -191,9 +191,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/AuthMethod.html" title="enum in org.apache.hadoop.hbase.security"><span class="typeNameLink">AuthMethod</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/SaslStatus.html" title="enum in org.apache.hadoop.hbase.security"><span class="typeNameLink">SaslStatus</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/SaslUtil.QualityOfProtection.html" title="enum in org.apache.hadoop.hbase.security"><span class="typeNameLink">SaslUtil.QualityOfProtection</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/SaslStatus.html" title="enum in org.apache.hadoop.hbase.security"><span class="typeNameLink">SaslStatus</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.security.<a href="../../../../../org/apache/hadoop/hbase/security/AuthMethod.html" title="enum in org.apache.hadoop.hbase.security"><span class="typeNameLink">AuthMethod</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html b/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
index 81a90a9..2cef8bd 100644
--- a/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/thrift/package-tree.html
@@ -199,9 +199,9 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.thrift.<a href="../../../../../org/apache/hadoop/hbase/thrift/MetricsThriftServerSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.thrift"><span class="typeNameLink">MetricsThriftServerSourceFactoryImpl.FactoryStorage</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.thrift.<a href="../../../../../org/apache/hadoop/hbase/thrift/ThriftServerRunner.ImplType.html" title="enum in org.apache.hadoop.hbase.thrift"><span class="typeNameLink">ThriftServerRunner.ImplType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.thrift.<a href="../../../../../org/apache/hadoop/hbase/thrift/ThriftMetrics.ThriftServerType.html" title="enum in org.apache.hadoop.hbase.thrift"><span class="typeNameLink">ThriftMetrics.ThriftServerType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.thrift.<a href="../../../../../org/apache/hadoop/hbase/thrift/MetricsThriftServerSourceFactoryImpl.FactoryStorage.html" title="enum in org.apache.hadoop.hbase.thrift"><span class="typeNameLink">MetricsThriftServerSourceFactoryImpl.FactoryStorage</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
index 958d0c9..bb451ea 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
@@ -541,13 +541,13 @@
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Order.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Order</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/ChecksumType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">ChecksumType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/IdReadWriteLock.ReferenceType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">IdReadWriteLock.ReferenceType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Bytes.LexicographicalComparerHolder.UnsafeComparer</span></a> (implements org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" title="interface in org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/PoolMap.PoolType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">PoolMap.PoolType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/PrettyPrinter.Unit.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">PrettyPrinter.Unit</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/IdReadWriteLock.ReferenceType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">IdReadWriteLock.ReferenceType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/ChecksumType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">ChecksumType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">HBaseFsck.ErrorReporter.ERROR_CODE</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/PoolMap.PoolType.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">PoolMap.PoolType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.PureJavaComparer.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Bytes.LexicographicalComparerHolder.PureJavaComparer</span></a> (implements org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" title="interface in org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
+<li type="circle">org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html" title="enum in org.apache.hadoop.hbase.util"><span class="typeNameLink">Bytes.LexicographicalComparerHolder.UnsafeComparer</span></a> (implements org.apache.hadoop.hbase.util.<a href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" title="interface in org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
index da83514..d3c8648 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
@@ -18,9 +18,9 @@
 <span class="sourceLineNo">010</span>  public static final String version = "3.0.0-SNAPSHOT";<a name="line.10"></a>
 <span class="sourceLineNo">011</span>  public static final String revision = "";<a name="line.11"></a>
 <span class="sourceLineNo">012</span>  public static final String user = "jenkins";<a name="line.12"></a>
-<span class="sourceLineNo">013</span>  public static final String date = "Mon Nov 26 14:44:10 UTC 2018";<a name="line.13"></a>
+<span class="sourceLineNo">013</span>  public static final String date = "Tue Nov 27 14:43:49 UTC 2018";<a name="line.13"></a>
 <span class="sourceLineNo">014</span>  public static final String url = "git://jenkins-websites1.apache.org/home/jenkins/jenkins-slave/workspace/hbase_generate_website/hbase";<a name="line.14"></a>
-<span class="sourceLineNo">015</span>  public static final String srcChecksum = "4d3ee036754273b7c20a302784193204";<a name="line.15"></a>
+<span class="sourceLineNo">015</span>  public static final String srcChecksum = "17dd2a5425ebe5b21808fd50b5751598";<a name="line.15"></a>
 <span class="sourceLineNo">016</span>}<a name="line.16"></a>
 
 


[23/26] hbase-site git commit: Published site at 6f15cecaed2f1f76bfe1880b7c578ed369daa5d5.

Posted by gi...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html b/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
index 83d53d3..186b826 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
@@ -216,11 +216,11 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
+<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/LockType.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">LockType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/Procedure.LockState.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">Procedure.LockState</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/RootProcedureState.State.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">RootProcedureState.State</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/LockedResourceType.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">LockedResourceType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/LockType.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">LockType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/StateMachineProcedure.Flow.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">StateMachineProcedure.Flow</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.procedure2.<a href="../../../../../org/apache/hadoop/hbase/procedure2/Procedure.LockState.html" title="enum in org.apache.hadoop.hbase.procedure2"><span class="typeNameLink">Procedure.LockState</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
index b8cd9d5..5557f8f 100644
--- a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
@@ -229,12 +229,12 @@
 <ul>
 <li type="circle">java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true" title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</a>)
 <ul>
-<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/ThrottlingException.Type.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">ThrottlingException.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/RpcThrottlingException.Type.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">RpcThrottlingException.Type</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/QuotaScope.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">QuotaScope</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/OperationQuota.OperationType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">OperationQuota.OperationType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/ThrottleType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">ThrottleType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/QuotaType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">QuotaType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/ThrottleType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">ThrottleType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/OperationQuota.OperationType.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">OperationQuota.OperationType</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/QuotaScope.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">QuotaScope</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/ThrottlingException.Type.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">ThrottlingException.Type</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.quotas.<a href="../../../../../org/apache/hadoop/hbase/quotas/SpaceViolationPolicy.html" title="enum in org.apache.hadoop.hbase.quotas"><span class="typeNameLink">SpaceViolationPolicy</span></a></li>
 </ul>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.WriterFactory.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.WriterFactory.html b/devapidocs/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.WriterFactory.html
index ba93974..165a1ec 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.WriterFactory.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.WriterFactory.html
@@ -105,7 +105,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>public static interface <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.html#line.45">AbstractMultiFileWriter.WriterFactory</a></pre>
+<pre>public static interface <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.html#line.43">AbstractMultiFileWriter.WriterFactory</a></pre>
 </li>
 </ul>
 </div>
@@ -149,7 +149,7 @@ var activeTableTab = "activeTableTab";
 <ul class="blockListLast">
 <li class="blockList">
 <h4>createWriter</h4>
-<pre><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileWriter.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileWriter</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.WriterFactory.html#line.46">createWriter</a>()
+<pre><a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileWriter.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileWriter</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.WriterFactory.html#line.44">createWriter</a>()
                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>

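The hunk above documents AbstractMultiFileWriter.WriterFactory as a single-method interface: createWriter() hands back a StoreFileWriter and may throw IOException. As a rough, self-contained analogue of that contract (the Sink-related names below are made up for illustration and are not HBase types), the same shape looks like this in plain Java:

    import java.io.IOException;
    import java.io.StringWriter;
    import java.io.Writer;

    // Hypothetical stand-in for the factory contract: one method, one new writer
    // per call, declared to throw IOException just like createWriter() above.
    interface SinkFactory {
      Writer createWriter() throws IOException;
    }

    public class SinkFactoryDemo {
      public static void main(String[] args) throws IOException {
        // A single-abstract-method factory can be supplied as a lambda.
        SinkFactory factory = () -> new StringWriter();
        Writer w = factory.createWriter();
        w.write("one new writer per output file");
        w.close();
      }
    }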
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.html b/devapidocs/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.html
index a4f8536..4f3884f 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.html
@@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab";
 <hr>
 <br>
 <pre>@InterfaceAudience.Private
-public abstract class <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.html#line.35">AbstractMultiFileWriter</a>
+public abstract class <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.html#line.33">AbstractMultiFileWriter</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a>
 implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/CellSink.html" title="interface in org.apache.hadoop.hbase.regionserver">CellSink</a>, <a href="../../../../../org/apache/hadoop/hbase/regionserver/ShipperListener.html" title="interface in org.apache.hadoop.hbase.regionserver">ShipperListener</a></pre>
 <div class="block">Base class for cell sink that separates the provided cells into multiple files.</div>
@@ -285,7 +285,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/CellSink
 <ul class="blockList">
 <li class="blockList">
 <h4>LOG</h4>
-<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.html#line.37">LOG</a></pre>
+<pre>private static final&nbsp;org.slf4j.Logger <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.html#line.35">LOG</a></pre>
 </li>
 </ul>
 <a name="writerFactory">
@@ -294,7 +294,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/CellSink
 <ul class="blockList">
 <li class="blockList">
 <h4>writerFactory</h4>
-<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.WriterFactory.html" title="interface in org.apache.hadoop.hbase.regionserver">AbstractMultiFileWriter.WriterFactory</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.html#line.40">writerFactory</a></pre>
+<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.WriterFactory.html" title="interface in org.apache.hadoop.hbase.regionserver">AbstractMultiFileWriter.WriterFactory</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.html#line.38">writerFactory</a></pre>
 <div class="block">Factory that is used to produce single StoreFile.Writer-s</div>
 </li>
 </ul>
@@ -304,7 +304,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/CellSink
 <ul class="blockListLast">
 <li class="blockList">
 <h4>sourceScanner</h4>
-<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreScanner.html" title="class in org.apache.hadoop.hbase.regionserver">StoreScanner</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.html#line.43">sourceScanner</a></pre>
+<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreScanner.html" title="class in org.apache.hadoop.hbase.regionserver">StoreScanner</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.html#line.41">sourceScanner</a></pre>
 <div class="block">Source scanner that is tracking KV count; may be null if source is not StoreScanner</div>
 </li>
 </ul>
@@ -322,7 +322,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/CellSink
 <ul class="blockListLast">
 <li class="blockList">
 <h4>AbstractMultiFileWriter</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.html#line.35">AbstractMultiFileWriter</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.html#line.33">AbstractMultiFileWriter</a>()</pre>
 </li>
 </ul>
 </li>
@@ -339,7 +339,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/CellSink
 <ul class="blockList">
 <li class="blockList">
 <h4>init</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.html#line.54">init</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreScanner.html" title="class in org.apache.hadoop.hbase.regionserver">StoreScanner</a>&nbsp;sourceScanner,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.html#line.52">init</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreScanner.html" title="class in org.apache.hadoop.hbase.regionserver">StoreScanner</a>&nbsp;sourceScanner,
                  <a href="../../../../../org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.WriterFactory.html" title="interface in org.apache.hadoop.hbase.regionserver">AbstractMultiFileWriter.WriterFactory</a>&nbsp;factory)</pre>
 <div class="block">Initializes multi-writer before usage.</div>
 <dl>
@@ -355,7 +355,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/CellSink
 <ul class="blockList">
 <li class="blockList">
 <h4>commitWriters</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.html#line.66">commitWriters</a>(long&nbsp;maxSeqId,
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.html#line.64">commitWriters</a>(long&nbsp;maxSeqId,
                                                      boolean&nbsp;majorCompaction)
                                               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Commit all writers.
@@ -375,7 +375,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/CellSink
 <ul class="blockList">
 <li class="blockList">
 <h4>abortWriters</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.html#line.89">abortWriters</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;org.apache.hadoop.fs.Path&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.html#line.87">abortWriters</a>()</pre>
 <div class="block">Close all writers without throwing any exceptions. This is used when compaction failed usually.</div>
 </li>
 </ul>
@@ -385,7 +385,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/CellSink
 <ul class="blockList">
 <li class="blockList">
 <h4>writers</h4>
-<pre>protected abstract&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileWriter.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileWriter</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.html#line.104">writers</a>()</pre>
+<pre>protected abstract&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileWriter.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileWriter</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.html#line.102">writers</a>()</pre>
 </li>
 </ul>
 <a name="preCommitWriters--">
@@ -394,7 +394,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/CellSink
 <ul class="blockList">
 <li class="blockList">
 <h4>preCommitWriters</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.html#line.110">preCommitWriters</a>()
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.html#line.108">preCommitWriters</a>()
                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Subclasses override this method to be called at the end of a successful sequence of append; all
  appends are processed before this method is called.</div>
@@ -410,7 +410,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/CellSink
 <ul class="blockList">
 <li class="blockList">
 <h4>preCloseWriter</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.html#line.117">preCloseWriter</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileWriter.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileWriter</a>&nbsp;writer)
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.html#line.115">preCloseWriter</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/StoreFileWriter.html" title="class in org.apache.hadoop.hbase.regionserver">StoreFileWriter</a>&nbsp;writer)
                        throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Subclasses override this method to be called before we close the give writer. Usually you can
  append extra metadata to the writer.</div>
@@ -426,7 +426,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/regionserver/CellSink
 <ul class="blockListLast">
 <li class="blockList">
 <h4>beforeShipped</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.html#line.121">beforeShipped</a>()
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/AbstractMultiFileWriter.html#line.119">beforeShipped</a>()
                    throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/ShipperListener.html#beforeShipped--">ShipperListener</a></code></span></div>
 <div class="block">The action that needs to be performed before <a href="../../../../../org/apache/hadoop/hbase/regionserver/Shipper.html#shipped--"><code>Shipper.shipped()</code></a> is performed</div>

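Taken together, the hunk above describes a small lifecycle: init(...) wires in a WriterFactory (and optionally a source scanner), subclasses keep a collection of StoreFileWriter instances, commitWriters(maxSeqId, majorCompaction) closes them and returns the resulting paths, abortWriters() closes them quietly when a compaction fails, and preCommitWriters()/preCloseWriter(writer) are the hooks for appending extra metadata. A minimal, self-contained sketch of that commit/abort shape, using only the JDK and hypothetical names (MultiSinkDemo is not an HBase class), might look like:

    import java.io.BufferedWriter;
    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.util.ArrayList;
    import java.util.List;

    // Hypothetical analogue of the multi-file writer pattern: records (here,
    // plain strings) are routed to one of several writers, and the caller
    // either commits all of them or aborts all of them. None of this is HBase
    // code; it only mirrors the commitWriters()/abortWriters() contract above.
    public class MultiSinkDemo {
      private final List<Path> paths = new ArrayList<>();
      private final List<BufferedWriter> writers = new ArrayList<>();

      void append(String row) throws IOException {
        // Simplistic routing rule standing in for the per-boundary routing a
        // real multi-writer performs: two buckets keyed off the first character.
        int bucket = Math.floorMod(row.isEmpty() ? 0 : row.charAt(0), 2);
        while (writers.size() <= bucket) {
          Path p = Files.createTempFile("multi-sink-", ".txt");
          paths.add(p);
          writers.add(Files.newBufferedWriter(p));
        }
        writers.get(bucket).write(row);
        writers.get(bucket).newLine();
      }

      // Analogue of commitWriters(): close every writer, return the files.
      List<Path> commit() throws IOException {
        for (BufferedWriter w : writers) {
          w.close();
        }
        return paths;
      }

      // Analogue of abortWriters(): close everything without propagating failures.
      List<Path> abort() {
        for (BufferedWriter w : writers) {
          try {
            w.close();
          } catch (IOException ignored) {
            // swallowed on purpose, mirroring the "without throwing" wording above
          }
        }
        return paths;
      }
    }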
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
index 1da2c31..b76dcda 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private static class <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1725">HRegionServer.CompactionChecker</a>
+<pre>private static class <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1730">HRegionServer.CompactionChecker</a>
 extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" title="class in org.apache.hadoop.hbase">ScheduledChore</a></pre>
 </li>
 </ul>
@@ -233,7 +233,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>instance</h4>
-<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html#line.1726">instance</a></pre>
+<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html#line.1731">instance</a></pre>
 </li>
 </ul>
 <a name="majorCompactPriority">
@@ -242,7 +242,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>majorCompactPriority</h4>
-<pre>private final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html#line.1727">majorCompactPriority</a></pre>
+<pre>private final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html#line.1732">majorCompactPriority</a></pre>
 </li>
 </ul>
 <a name="DEFAULT_PRIORITY">
@@ -251,7 +251,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>DEFAULT_PRIORITY</h4>
-<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html#line.1728">DEFAULT_PRIORITY</a></pre>
+<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html#line.1733">DEFAULT_PRIORITY</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.regionserver.HRegionServer.CompactionChecker.DEFAULT_PRIORITY">Constant Field Values</a></dd>
@@ -264,7 +264,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockListLast">
 <li class="blockList">
 <h4>iteration</h4>
-<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html#line.1731">iteration</a></pre>
+<pre>private&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html#line.1736">iteration</a></pre>
 </li>
 </ul>
 </li>
@@ -281,7 +281,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockListLast">
 <li class="blockList">
 <h4>CompactionChecker</h4>
-<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html#line.1733">CompactionChecker</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a>&nbsp;h,
+<pre><a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html#line.1738">CompactionChecker</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a>&nbsp;h,
                   int&nbsp;sleepTime,
                   <a href="../../../../../org/apache/hadoop/hbase/Stoppable.html" title="interface in org.apache.hadoop.hbase">Stoppable</a>&nbsp;stopper)</pre>
 </li>
@@ -300,7 +300,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockListLast">
 <li class="blockList">
 <h4>chore</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html#line.1747">chore</a>()</pre>
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.CompactionChecker.html#line.1752">chore</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html#chore--">ScheduledChore</a></code></span></div>
 <div class="block">The task to execute on each scheduled execution of the Chore</div>
 <dl>

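The class documented above is a periodic task: CompactionChecker extends ScheduledChore, is constructed with the owning HRegionServer, a sleep interval and a Stoppable, and its chore() body runs on every scheduled tick. Outside HBase the same pattern can be sketched with a plain ScheduledExecutorService; the names below (CompactionCheckLoop, needsCompaction) are illustrative only and do not appear in the codebase:

    import java.util.concurrent.Executors;
    import java.util.concurrent.ScheduledExecutorService;
    import java.util.concurrent.TimeUnit;

    // Hypothetical analogue of a ScheduledChore-style checker: a task that wakes
    // up on a fixed interval, inspects some state, and triggers work when needed.
    public class CompactionCheckLoop {
      private final ScheduledExecutorService pool =
          Executors.newSingleThreadScheduledExecutor();

      // Stand-in for the real "does this store need compaction?" decision.
      private boolean needsCompaction() {
        return System.nanoTime() % 2 == 0;
      }

      public void start(long sleepTimeMillis) {
        pool.scheduleAtFixedRate(() -> {
          // Equivalent of chore(): executed once per scheduled period.
          if (needsCompaction()) {
            System.out.println("would request a compaction here");
          }
        }, sleepTimeMillis, sleepTimeMillis, TimeUnit.MILLISECONDS);
      }

      // Equivalent of the Stoppable handed to the chore: stops future executions.
      public void stop() {
        pool.shutdownNow();
      }

      public static void main(String[] args) throws InterruptedException {
        CompactionCheckLoop loop = new CompactionCheckLoop();
        loop.start(100);
        Thread.sleep(500);
        loop.stop();
      }
    }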
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
index 6a88e40..da8e035 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private static class <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.3440">HRegionServer.MovedRegionInfo</a>
+<pre>private static class <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.3445">HRegionServer.MovedRegionInfo</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></pre>
 </li>
 </ul>
@@ -218,7 +218,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>serverName</h4>
-<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html#line.3441">serverName</a></pre>
+<pre>private final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html#line.3446">serverName</a></pre>
 </li>
 </ul>
 <a name="seqNum">
@@ -227,7 +227,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>seqNum</h4>
-<pre>private final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html#line.3442">seqNum</a></pre>
+<pre>private final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html#line.3447">seqNum</a></pre>
 </li>
 </ul>
 <a name="ts">
@@ -236,7 +236,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>ts</h4>
-<pre>private final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html#line.3443">ts</a></pre>
+<pre>private final&nbsp;long <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html#line.3448">ts</a></pre>
 </li>
 </ul>
 </li>
@@ -253,7 +253,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>MovedRegionInfo</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html#line.3445">MovedRegionInfo</a>(<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName,
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html#line.3450">MovedRegionInfo</a>(<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;serverName,
                        long&nbsp;closeSeqNum)</pre>
 </li>
 </ul>
@@ -271,7 +271,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getServerName</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html#line.3451">getServerName</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html#line.3456">getServerName</a>()</pre>
 </li>
 </ul>
 <a name="getSeqNum--">
@@ -280,7 +280,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>getSeqNum</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html#line.3455">getSeqNum</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html#line.3460">getSeqNum</a>()</pre>
 </li>
 </ul>
 <a name="getMoveTime--">
@@ -289,7 +289,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockListLast">
 <li class="blockList">
 <h4>getMoveTime</h4>
-<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html#line.3459">getMoveTime</a>()</pre>
+<pre>public&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html#line.3464">getMoveTime</a>()</pre>
 </li>
 </ul>
 </li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
index 8e1bb52..20c585f 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>protected static final class <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.3527">HRegionServer.MovedRegionsCleaner</a>
+<pre>protected static final class <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.3532">HRegionServer.MovedRegionsCleaner</a>
 extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" title="class in org.apache.hadoop.hbase">ScheduledChore</a>
 implements <a href="../../../../../org/apache/hadoop/hbase/Stoppable.html" title="interface in org.apache.hadoop.hbase">Stoppable</a></pre>
 <div class="block">Creates a Chore thread to clean the moved region cache.</div>
@@ -242,7 +242,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/Stoppable.html" title
 <ul class="blockList">
 <li class="blockList">
 <h4>regionServer</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html#line.3528">regionServer</a></pre>
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html#line.3533">regionServer</a></pre>
 </li>
 </ul>
 <a name="stoppable">
@@ -251,7 +251,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/Stoppable.html" title
 <ul class="blockListLast">
 <li class="blockList">
 <h4>stoppable</h4>
-<pre><a href="../../../../../org/apache/hadoop/hbase/Stoppable.html" title="interface in org.apache.hadoop.hbase">Stoppable</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html#line.3529">stoppable</a></pre>
+<pre><a href="../../../../../org/apache/hadoop/hbase/Stoppable.html" title="interface in org.apache.hadoop.hbase">Stoppable</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html#line.3534">stoppable</a></pre>
 </li>
 </ul>
 </li>
@@ -268,7 +268,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/Stoppable.html" title
 <ul class="blockListLast">
 <li class="blockList">
 <h4>MovedRegionsCleaner</h4>
-<pre>private&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html#line.3531">MovedRegionsCleaner</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a>&nbsp;regionServer,
+<pre>private&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html#line.3536">MovedRegionsCleaner</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a>&nbsp;regionServer,
                             <a href="../../../../../org/apache/hadoop/hbase/Stoppable.html" title="interface in org.apache.hadoop.hbase">Stoppable</a>&nbsp;stoppable)</pre>
 </li>
 </ul>
@@ -286,7 +286,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/Stoppable.html" title
 <ul class="blockList">
 <li class="blockList">
 <h4>create</h4>
-<pre>static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer.MovedRegionsCleaner</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html#line.3539">create</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a>&nbsp;rs)</pre>
+<pre>static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer.MovedRegionsCleaner</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html#line.3544">create</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a>&nbsp;rs)</pre>
 </li>
 </ul>
 <a name="chore--">
@@ -295,7 +295,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/Stoppable.html" title
 <ul class="blockList">
 <li class="blockList">
 <h4>chore</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html#line.3550">chore</a>()</pre>
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html#line.3555">chore</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html#chore--">ScheduledChore</a></code></span></div>
 <div class="block">The task to execute on each scheduled execution of the Chore</div>
 <dl>
@@ -310,7 +310,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/Stoppable.html" title
 <ul class="blockList">
 <li class="blockList">
 <h4>stop</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html#line.3555">stop</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;why)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html#line.3560">stop</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;why)</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/Stoppable.html#stop-java.lang.String-">Stoppable</a></code></span></div>
 <div class="block">Stop this service.
  Implementers should favor logging errors over throwing RuntimeExceptions.</div>
@@ -328,7 +328,7 @@ implements <a href="../../../../../org/apache/hadoop/hbase/Stoppable.html" title
 <ul class="blockListLast">
 <li class="blockList">
 <h4>isStopped</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html#line.3560">isStopped</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionsCleaner.html#line.3565">isStopped</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/Stoppable.html#isStopped--">isStopped</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/Stoppable.html" title="interface in org.apache.hadoop.hbase">Stoppable</a></code></dd>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html
index 78a5a65..a2b0120 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1787">HRegionServer.PeriodicMemStoreFlusher</a>
+<pre>static class <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1792">HRegionServer.PeriodicMemStoreFlusher</a>
 extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" title="class in org.apache.hadoop.hbase">ScheduledChore</a></pre>
 </li>
 </ul>
@@ -232,7 +232,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>server</h4>
-<pre>final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html#line.1788">server</a></pre>
+<pre>final&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a> <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html#line.1793">server</a></pre>
 </li>
 </ul>
 <a name="RANGE_OF_DELAY">
@@ -241,7 +241,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>RANGE_OF_DELAY</h4>
-<pre>static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html#line.1789">RANGE_OF_DELAY</a></pre>
+<pre>static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html#line.1794">RANGE_OF_DELAY</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.regionserver.HRegionServer.PeriodicMemStoreFlusher.RANGE_OF_DELAY">Constant Field Values</a></dd>
@@ -254,7 +254,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockList">
 <li class="blockList">
 <h4>MIN_DELAY_TIME</h4>
-<pre>static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html#line.1790">MIN_DELAY_TIME</a></pre>
+<pre>static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html#line.1795">MIN_DELAY_TIME</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.regionserver.HRegionServer.PeriodicMemStoreFlusher.MIN_DELAY_TIME">Constant Field Values</a></dd>
@@ -267,7 +267,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockListLast">
 <li class="blockList">
 <h4>rangeOfDelay</h4>
-<pre>final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html#line.1792">rangeOfDelay</a></pre>
+<pre>final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html#line.1797">rangeOfDelay</a></pre>
 </li>
 </ul>
 </li>
@@ -284,7 +284,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockListLast">
 <li class="blockList">
 <h4>PeriodicMemStoreFlusher</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html#line.1793">PeriodicMemStoreFlusher</a>(int&nbsp;cacheFlushInterval,
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html#line.1798">PeriodicMemStoreFlusher</a>(int&nbsp;cacheFlushInterval,
                                <a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a>&nbsp;server)</pre>
 </li>
 </ul>
@@ -302,7 +302,7 @@ extends <a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html" tit
 <ul class="blockListLast">
 <li class="blockList">
 <h4>chore</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html#line.1802">chore</a>()</pre>
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.PeriodicMemStoreFlusher.html#line.1807">chore</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from class:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/ScheduledChore.html#chore--">ScheduledChore</a></code></span></div>
 <div class="block">The task to execute on each scheduled execution of the Chore</div>
 <dl>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.SystemExitWhenAbortTimeout.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.SystemExitWhenAbortTimeout.html b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.SystemExitWhenAbortTimeout.html
index ed76ee3..7e272c9 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.SystemExitWhenAbortTimeout.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.SystemExitWhenAbortTimeout.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 </dl>
 <hr>
 <br>
-<pre>private static class <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.3831">HRegionServer.SystemExitWhenAbortTimeout</a>
+<pre>private static class <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.3836">HRegionServer.SystemExitWhenAbortTimeout</a>
 extends <a href="https://docs.oracle.com/javase/8/docs/api/java/util/TimerTask.html?is-external=true" title="class or interface in java.util">TimerTask</a></pre>
 <div class="block">Force to terminate region server when abort timeout.</div>
 </li>
@@ -199,7 +199,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/util/TimerTask.h
 <ul class="blockListLast">
 <li class="blockList">
 <h4>SystemExitWhenAbortTimeout</h4>
-<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.SystemExitWhenAbortTimeout.html#line.3833">SystemExitWhenAbortTimeout</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.SystemExitWhenAbortTimeout.html#line.3838">SystemExitWhenAbortTimeout</a>()</pre>
 </li>
 </ul>
 </li>
@@ -216,7 +216,7 @@ extends <a href="https://docs.oracle.com/javase/8/docs/api/java/util/TimerTask.h
 <ul class="blockListLast">
 <li class="blockList">
 <h4>run</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.SystemExitWhenAbortTimeout.html#line.3837">run</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.SystemExitWhenAbortTimeout.html#line.3842">run</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true#run--" title="class or interface in java.lang">run</a></code>&nbsp;in interface&nbsp;<code><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true" title="class or interface in java.lang">Runnable</a></code></dd>


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/dccdd274/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html
index dd6d5f5..8d1bfab 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HRegionServer.html
@@ -2481,7 +2481,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>movedRegions</h4>
-<pre>protected&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer.MovedRegionInfo</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.3466">movedRegions</a></pre>
+<pre>protected&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true" title="class or interface in java.util">Map</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>,<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.MovedRegionInfo.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer.MovedRegionInfo</a>&gt; <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.3471">movedRegions</a></pre>
 </li>
 </ul>
 <a name="TIMEOUT_REGION_MOVED">
@@ -2490,7 +2490,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockListLast">
 <li class="blockList">
 <h4>TIMEOUT_REGION_MOVED</h4>
-<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.3470">TIMEOUT_REGION_MOVED</a></pre>
+<pre>private static final&nbsp;int <a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.3475">TIMEOUT_REGION_MOVED</a></pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../constant-values.html#org.apache.hadoop.hbase.regionserver.HRegionServer.TIMEOUT_REGION_MOVED">Constant Field Values</a></dd>
@@ -2534,7 +2534,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>getUseThisHostnameInstead</h4>
-<pre>protected&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.658">getUseThisHostnameInstead</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)
+<pre>protected&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.663">getUseThisHostnameInstead</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)
                                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -2548,7 +2548,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>setupWindows</h4>
-<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.677">setupWindows</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.682">setupWindows</a>(org.apache.hadoop.conf.Configuration&nbsp;conf,
                                  <a href="../../../../../org/apache/hadoop/hbase/conf/ConfigurationManager.html" title="class in org.apache.hadoop.hbase.conf">ConfigurationManager</a>&nbsp;cm)</pre>
 <div class="block">If running on Windows, do windows-specific setup.</div>
 </li>
@@ -2559,7 +2559,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>setupNetty</h4>
-<pre>private static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/NettyEventLoopGroupConfig.html" title="class in org.apache.hadoop.hbase.util">NettyEventLoopGroupConfig</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.689">setupNetty</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
+<pre>private static&nbsp;<a href="../../../../../org/apache/hadoop/hbase/util/NettyEventLoopGroupConfig.html" title="class in org.apache.hadoop.hbase.util">NettyEventLoopGroupConfig</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.694">setupNetty</a>(org.apache.hadoop.conf.Configuration&nbsp;conf)</pre>
 </li>
 </ul>
 <a name="initializeFileSystem--">
@@ -2568,7 +2568,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>initializeFileSystem</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.698">initializeFileSystem</a>()
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.703">initializeFileSystem</a>()
                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -2582,7 +2582,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>getFsTableDescriptors</h4>
-<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/TableDescriptors.html" title="interface in org.apache.hadoop.hbase">TableDescriptors</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.714">getFsTableDescriptors</a>()
+<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/TableDescriptors.html" title="interface in org.apache.hadoop.hbase">TableDescriptors</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.719">getFsTableDescriptors</a>()
                                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -2596,7 +2596,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>getMetaTableObserver</h4>
-<pre>protected&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true" title="class or interface in java.util.function">Function</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/TableDescriptorBuilder.html" title="class in org.apache.hadoop.hbase.client">TableDescriptorBuilder</a>,<a href="../../../../../org/apache/hadoop/hbase/client/TableDescriptorBuilder.html" title="class in org.apache.hadoop.hbase.client">TableDescriptorBuilder</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.719">getMetaTableObserver</a>()</pre>
+<pre>protected&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true" title="class or interface in java.util.function">Function</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/client/TableDescriptorBuilder.html" title="class in org.apache.hadoop.hbase.client">TableDescriptorBuilder</a>,<a href="../../../../../org/apache/hadoop/hbase/client/TableDescriptorBuilder.html" title="class in org.apache.hadoop.hbase.client">TableDescriptorBuilder</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.724">getMetaTableObserver</a>()</pre>
 </li>
 </ul>
 <a name="login-org.apache.hadoop.hbase.security.UserProvider-java.lang.String-">
@@ -2605,7 +2605,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>login</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.723">login</a>(<a href="../../../../../org/apache/hadoop/hbase/security/UserProvider.html" title="class in org.apache.hadoop.hbase.security">UserProvider</a>&nbsp;user,
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.728">login</a>(<a href="../../../../../org/apache/hadoop/hbase/security/UserProvider.html" title="class in org.apache.hadoop.hbase.security">UserProvider</a>&nbsp;user,
                      <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;host)
               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -2620,7 +2620,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>waitForMasterActive</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.733">waitForMasterActive</a>()</pre>
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.738">waitForMasterActive</a>()</pre>
 <div class="block">Wait for an active Master.
  See override in Master superclass for how it is used.</div>
 </li>
@@ -2631,7 +2631,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>getProcessName</h4>
-<pre>protected&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.735">getProcessName</a>()</pre>
+<pre>protected&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.740">getProcessName</a>()</pre>
 </li>
 </ul>
 <a name="canCreateBaseZNode--">
@@ -2640,7 +2640,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>canCreateBaseZNode</h4>
-<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.739">canCreateBaseZNode</a>()</pre>
+<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.744">canCreateBaseZNode</a>()</pre>
 </li>
 </ul>
 <a name="canUpdateTableDescriptor--">
@@ -2649,7 +2649,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>canUpdateTableDescriptor</h4>
-<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.743">canUpdateTableDescriptor</a>()</pre>
+<pre>protected&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.748">canUpdateTableDescriptor</a>()</pre>
 </li>
 </ul>
 <a name="createRpcServices--">
@@ -2658,7 +2658,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>createRpcServices</h4>
-<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/RSRpcServices.html" title="class in org.apache.hadoop.hbase.regionserver">RSRpcServices</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.747">createRpcServices</a>()
+<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/RSRpcServices.html" title="class in org.apache.hadoop.hbase.regionserver">RSRpcServices</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.752">createRpcServices</a>()
                                    throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -2672,7 +2672,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>configureInfoServer</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.751">configureInfoServer</a>()</pre>
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.756">configureInfoServer</a>()</pre>
 </li>
 </ul>
 <a name="getDumpServlet--">
@@ -2681,7 +2681,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>getDumpServlet</h4>
-<pre>protected&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a>&lt;? extends javax.servlet.http.HttpServlet&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.756">getDumpServlet</a>()</pre>
+<pre>protected&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Class.html?is-external=true" title="class or interface in java.lang">Class</a>&lt;? extends javax.servlet.http.HttpServlet&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.761">getDumpServlet</a>()</pre>
 </li>
 </ul>
 <a name="registerService-com.google.protobuf.Service-">
@@ -2690,7 +2690,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>registerService</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.761">registerService</a>(com.google.protobuf.Service&nbsp;instance)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.766">registerService</a>(com.google.protobuf.Service&nbsp;instance)</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.html#registerService-com.google.protobuf.Service-">RegionServerServices</a></code></span></div>
 <div class="block">Registers a new protocol buffer <code>Service</code> subclass as a coprocessor endpoint to be
  available for handling</div>
@@ -2710,7 +2710,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>createClusterConnection</h4>
-<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.785">createClusterConnection</a>()
+<pre>private&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.790">createClusterConnection</a>()
                                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Create a 'smarter' Connection, one that is capable of by-passing RPC if the request is to the
  local server; i.e. a short-circuit Connection. Safe to use going to local or remote server.</div>
@@ -2726,7 +2726,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>checkCodecs</h4>
-<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.812">checkCodecs</a>(org.apache.hadoop.conf.Configuration&nbsp;c)
+<pre>private static&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.817">checkCodecs</a>(org.apache.hadoop.conf.Configuration&nbsp;c)
                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Run test on configured codecs to make sure supporting libs are in place.</div>
 <dl>
@@ -2743,7 +2743,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>getClusterId</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.824">getClusterId</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.829">getClusterId</a>()</pre>
 </li>
 </ul>
 <a name="setupClusterConnection--">
@@ -2752,7 +2752,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>setupClusterConnection</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.831">setupClusterConnection</a>()
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.836">setupClusterConnection</a>()
                                throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Setup our cluster connection if not already initialized.</div>
 <dl>
@@ -2767,7 +2767,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>preRegistrationInitialization</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.843">preRegistrationInitialization</a>()</pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.848">preRegistrationInitialization</a>()</pre>
 <div class="block">All initialization needed before we go register with Master.<br>
  Do bare minimum. Do bulk of initializations AFTER we've connected to the Master.<br>
  In here we just put up the RpcServer, setup Connection, and ZooKeeper.</div>
@@ -2779,7 +2779,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>initializeZooKeeper</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.866">initializeZooKeeper</a>()
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.871">initializeZooKeeper</a>()
                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                                  <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
 <div class="block">Bring up connection to zk ensemble and then wait until a master for this cluster and then after
@@ -2799,7 +2799,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>blockAndCheckIfStopped</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.919">blockAndCheckIfStopped</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKNodeTracker.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKNodeTracker</a>&nbsp;tracker)
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.924">blockAndCheckIfStopped</a>(<a href="../../../../../org/apache/hadoop/hbase/zookeeper/ZKNodeTracker.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKNodeTracker</a>&nbsp;tracker)
                              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a>,
                                     <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true" title="class or interface in java.lang">InterruptedException</a></pre>
 <div class="block">Utilty method to wait indefinitely on a znode availability while checking
@@ -2819,7 +2819,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>isClusterUp</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.932">isClusterUp</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.937">isClusterUp</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.html#isClusterUp--">isClusterUp</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.html" title="interface in org.apache.hadoop.hbase.regionserver">RegionServerServices</a></code></dd>
@@ -2834,7 +2834,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>run</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.941">run</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.946">run</a>()</pre>
 <div class="block">The HRegionServer sticks in this loop until closed.</div>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
@@ -2850,7 +2850,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>containsMetaTableRegions</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1186">containsMetaTableRegions</a>()</pre>
+<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1191">containsMetaTableRegions</a>()</pre>
 </li>
 </ul>
 <a name="areAllUserRegionsOffline--">
@@ -2859,7 +2859,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>areAllUserRegionsOffline</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1190">areAllUserRegionsOffline</a>()</pre>
+<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1195">areAllUserRegionsOffline</a>()</pre>
 </li>
 </ul>
 <a name="getWriteRequestCount--">
@@ -2868,7 +2868,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>getWriteRequestCount</h4>
-<pre>private&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1205">getWriteRequestCount</a>()</pre>
+<pre>private&nbsp;long&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1210">getWriteRequestCount</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>Current write count for all online regions.</dd>
@@ -2881,7 +2881,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>tryRegionServerReport</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1214">tryRegionServerReport</a>(long&nbsp;reportStartTime,
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1219">tryRegionServerReport</a>(long&nbsp;reportStartTime,
                                      long&nbsp;reportEndTime)
                               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -2896,7 +2896,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>reportRegionSizesForQuotas</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1248">reportRegionSizesForQuotas</a>(<a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStore.html" title="interface in org.apache.hadoop.hbase.quotas">RegionSizeStore</a>&nbsp;regionSizeStore)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1253">reportRegionSizesForQuotas</a>(<a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStore.html" title="interface in org.apache.hadoop.hbase.quotas">RegionSizeStore</a>&nbsp;regionSizeStore)</pre>
 <div class="block">Reports the given map of Regions and their size on the filesystem to the active Master.</div>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
@@ -2914,7 +2914,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>buildReportAndSend</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1291">buildReportAndSend</a>(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStatusService.BlockingInterface&nbsp;rss,
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1296">buildReportAndSend</a>(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStatusService.BlockingInterface&nbsp;rss,
                         <a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStore.html" title="interface in org.apache.hadoop.hbase.quotas">RegionSizeStore</a>&nbsp;regionSizeStore)
                  throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException</pre>
 <div class="block">Builds the region size report and sends it to the master. Upon successful sending of the
@@ -2934,7 +2934,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>buildRegionSpaceUseReportRequest</h4>
-<pre>org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionSpaceUseReportRequest&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1308">buildRegionSpaceUseReportRequest</a>(<a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStore.html" title="interface in org.apache.hadoop.hbase.quotas">RegionSizeStore</a>&nbsp;regionSizes)</pre>
+<pre>org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionSpaceUseReportRequest&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1313">buildRegionSpaceUseReportRequest</a>(<a href="../../../../../org/apache/hadoop/hbase/quotas/RegionSizeStore.html" title="interface in org.apache.hadoop.hbase.quotas">RegionSizeStore</a>&nbsp;regionSizes)</pre>
 <div class="block">Builds a <code>RegionServerStatusProtos.RegionSpaceUseReportRequest</code> protobuf message from the region size map.</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -2950,7 +2950,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>convertRegionSize</h4>
-<pre>org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionSpaceUse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1324">convertRegionSize</a>(<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;regionInfo,
+<pre>org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionSpaceUse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1329">convertRegionSize</a>(<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;regionInfo,
                                                                                                             <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>&nbsp;sizeInBytes)</pre>
 <div class="block">Converts a pair of <a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client"><code>RegionInfo</code></a> and <code>long</code> into a <code>RegionServerStatusProtos.RegionSpaceUse</code>
  protobuf message.</div>
@@ -2969,7 +2969,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>buildServerLoad</h4>
-<pre>org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1331">buildServerLoad</a>(long&nbsp;reportStartTime,
+<pre>org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1336">buildServerLoad</a>(long&nbsp;reportStartTime,
                                                                                                  long&nbsp;reportEndTime)
                                                                                           throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -2984,7 +2984,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>getOnlineRegionsAsPrintableString</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1402">getOnlineRegionsAsPrintableString</a>()</pre>
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1407">getOnlineRegionsAsPrintableString</a>()</pre>
 </li>
 </ul>
 <a name="waitOnAllRegionsToClose-boolean-">
@@ -2993,7 +2993,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>waitOnAllRegionsToClose</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1414">waitOnAllRegionsToClose</a>(boolean&nbsp;abort)</pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1419">waitOnAllRegionsToClose</a>(boolean&nbsp;abort)</pre>
 <div class="block">Wait on regions close.</div>
 </li>
 </ul>
@@ -3003,7 +3003,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>sleep</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1468">sleep</a>(long&nbsp;millis)</pre>
+<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1473">sleep</a>(long&nbsp;millis)</pre>
 </li>
 </ul>
 <a name="shutdownWAL-boolean-">
@@ -3012,7 +3012,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>shutdownWAL</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1479">shutdownWAL</a>(boolean&nbsp;close)</pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1484">shutdownWAL</a>(boolean&nbsp;close)</pre>
 </li>
 </ul>
 <a name="handleReportForDutyResponse-org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse-">
@@ -3021,7 +3021,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>handleReportForDutyResponse</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1500">handleReportForDutyResponse</a>(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse&nbsp;c)
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1505">handleReportForDutyResponse</a>(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse&nbsp;c)
                                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -3035,7 +3035,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>initializeMemStoreChunkCreator</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1594">initializeMemStoreChunkCreator</a>()</pre>
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1599">initializeMemStoreChunkCreator</a>()</pre>
 </li>
 </ul>
 <a name="startHeapMemoryManager--">
@@ -3044,7 +3044,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>startHeapMemoryManager</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1615">startHeapMemoryManager</a>()</pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1620">startHeapMemoryManager</a>()</pre>
 </li>
 </ul>
 <a name="createMyEphemeralNode--">
@@ -3053,7 +3053,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>createMyEphemeralNode</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1623">createMyEphemeralNode</a>()
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1628">createMyEphemeralNode</a>()
                             throws org.apache.zookeeper.KeeperException,
                                    <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
@@ -3069,7 +3069,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>deleteMyEphemeralNode</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1631">deleteMyEphemeralNode</a>()
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1636">deleteMyEphemeralNode</a>()
                             throws org.apache.zookeeper.KeeperException</pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -3083,7 +3083,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>getRegionServerAccounting</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerAccounting.html" title="class in org.apache.hadoop.hbase.regionserver">RegionServerAccounting</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1636">getRegionServerAccounting</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerAccounting.html" title="class in org.apache.hadoop.hbase.regionserver">RegionServerAccounting</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1641">getRegionServerAccounting</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.html#getRegionServerAccounting--">getRegionServerAccounting</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.html" title="interface in org.apache.hadoop.hbase.regionserver">RegionServerServices</a></code></dd>
@@ -3098,7 +3098,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>createRegionLoad</h4>
-<pre>org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1648">createRegionLoad</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion</a>&nbsp;r,
+<pre>org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1653">createRegionLoad</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion</a>&nbsp;r,
                                                                                                   org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad.Builder&nbsp;regionLoadBldr,
                                                                                                   org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder&nbsp;regionSpecifier)
                                                                                            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
@@ -3114,7 +3114,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>createRegionLoad</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1717">createRegionLoad</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;encodedRegionName)
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1722">createRegionLoad</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;encodedRegionName)
                                                                                                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -3132,7 +3132,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>isOnline</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1831">isOnline</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1836">isOnline</a>()</pre>
 <div class="block">Report the status of the server. A server is online once all the startup is
  completed (setting up filesystem, starting executorService threads, etc.). This
  method is designed mostly to be useful in tests.</div>
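 As an illustration only (the helper and its timeout handling below are hypothetical, not part of HRegionServer), a test that starts a region server directly might poll this flag before asserting anything against the server:
 <pre>
 import org.apache.hadoop.hbase.regionserver.HRegionServer;

 final class WaitForOnlineExample {
   // 'rs' is assumed to come from a test harness such as a mini cluster.
   static void waitUntilOnline(HRegionServer rs, long timeoutMs) throws InterruptedException {
     long deadline = System.currentTimeMillis() + timeoutMs;
     while (!rs.isOnline()) {              // flips to true once startup completes
       if (System.currentTimeMillis() > deadline) {
         throw new IllegalStateException("region server did not come online in time");
       }
       Thread.sleep(100);                  // simple polling is fine for a test helper
     }
   }
 }
 </pre>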
@@ -3148,7 +3148,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>setupWALAndReplication</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1839">setupWALAndReplication</a>()
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1844">setupWALAndReplication</a>()
                              throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Setup WAL log and replication if enabled. Replication setup is done in here because it wants to
  be hooked up to WAL.</div>
@@ -3164,7 +3164,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>startReplicationService</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1871">startReplicationService</a>()
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1876">startReplicationService</a>()
                               throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Start up replication source and sink handlers.</div>
 <dl>
@@ -3179,7 +3179,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>getRegionServerMetrics</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">MetricsRegionServer</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1886">getRegionServerMetrics</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">MetricsRegionServer</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1891">getRegionServerMetrics</a>()</pre>
 </li>
 </ul>
 <a name="getMasterAddressTracker--">
@@ -3188,7 +3188,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>getMasterAddressTracker</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/zookeeper/MasterAddressTracker.html" title="class in org.apache.hadoop.hbase.zookeeper">MasterAddressTracker</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1893">getMasterAddressTracker</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/zookeeper/MasterAddressTracker.html" title="class in org.apache.hadoop.hbase.zookeeper">MasterAddressTracker</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1898">getMasterAddressTracker</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>Master address tracker instance.</dd>
@@ -3201,7 +3201,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>startServices</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1911">startServices</a>()
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.1916">startServices</a>()
                     throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -3215,7 +3215,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>initializeThreads</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2013">initializeThreads</a>()
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2018">initializeThreads</a>()
                         throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -3229,7 +3229,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>registerConfigurationObservers</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2060">registerConfigurationObservers</a>()</pre>
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2065">registerConfigurationObservers</a>()</pre>
 </li>
 </ul>
 <a name="putUpWebUI--">
@@ -3238,7 +3238,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>putUpWebUI</h4>
-<pre>private&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2072">putUpWebUI</a>()
+<pre>private&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2077">putUpWebUI</a>()
                 throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Puts up the webui.</div>
 <dl>
@@ -3255,7 +3255,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>isHealthy</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2126">isHealthy</a>()</pre>
+<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2131">isHealthy</a>()</pre>
 </li>
 </ul>
 <a name="getWALs--">
@@ -3264,7 +3264,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>getWALs</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/wal/WAL.html" title="interface in org.apache.hadoop.hbase.wal">WAL</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2144">getWALs</a>()
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/wal/WAL.html" title="interface in org.apache.hadoop.hbase.wal">WAL</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2149">getWALs</a>()
                   throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
@@ -3283,7 +3283,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>getWAL</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WAL.html" title="interface in org.apache.hadoop.hbase.wal">WAL</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2149">getWAL</a>(<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;regionInfo)
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/wal/WAL.html" title="interface in org.apache.hadoop.hbase.wal">WAL</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2154">getWAL</a>(<a href="../../../../../org/apache/hadoop/hbase/client/RegionInfo.html" title="interface in org.apache.hadoop.hbase.client">RegionInfo</a>&nbsp;regionInfo)
            throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
@@ -3302,7 +3302,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>getWalRoller</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/LogRoller.html" title="class in org.apache.hadoop.hbase.regionserver">LogRoller</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2157">getWalRoller</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/LogRoller.html" title="class in org.apache.hadoop.hbase.regionserver">LogRoller</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2162">getWalRoller</a>()</pre>
 </li>
 </ul>
 <a name="getConnection--">
@@ -3311,7 +3311,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>getConnection</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2162">getConnection</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/Connection.html" title="interface in org.apache.hadoop.hbase.client">Connection</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2167">getConnection</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/Server.html#getConnection--">Server</a></code></span></div>
 <div class="block">Returns a reference to the servers' connection.
 
@@ -3329,7 +3329,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>getClusterConnection</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2167">getClusterConnection</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html" title="interface in org.apache.hadoop.hbase.client">ClusterConnection</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2172">getClusterConnection</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/Server.html#getClusterConnection--">Server</a></code></span></div>
 <div class="block">Returns a reference to the servers' cluster connection. Prefer <a href="../../../../../org/apache/hadoop/hbase/Server.html#getConnection--"><code>Server.getConnection()</code></a>.
 
@@ -3347,7 +3347,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>getMetaTableLocator</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2172">getMetaTableLocator</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper">MetaTableLocator</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2177">getMetaTableLocator</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/Server.html#getMetaTableLocator--">Server</a></code></span></div>
 <div class="block">Returns instance of <a href="../../../../../org/apache/hadoop/hbase/zookeeper/MetaTableLocator.html" title="class in org.apache.hadoop.hbase.zookeeper"><code>MetaTableLocator</code></a>
 running inside this server. This MetaTableLocator is started and stopped by the server; clients
@@ -3366,7 +3366,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>stop</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2177">stop</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;msg)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2182">stop</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;msg)</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/Stoppable.html#stop-java.lang.String-">Stoppable</a></code></span></div>
 <div class="block">Stop this service.
  Implementers should favor logging errors over throwing RuntimeExceptions.</div>
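 A minimal, hypothetical Stoppable implementation following that contract might look like this (the class name is made up; the two overridden methods are the real interface):
 <pre>
 import org.apache.hadoop.hbase.Stoppable;

 final class ExampleStoppable implements Stoppable {
   private volatile boolean stopped = false;

   @Override
   public void stop(String why) {
     // Per the contract above: prefer logging problems to throwing RuntimeExceptions.
     System.out.println("Stopping: " + why);
     stopped = true;
   }

   @Override
   public boolean isStopped() {
     return stopped;
   }
 }
 </pre>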
@@ -3384,7 +3384,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>stop</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2187">stop</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;msg,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2192">stop</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;msg,
                  boolean&nbsp;force,
                  <a href="../../../../../org/apache/hadoop/hbase/security/User.html" title="class in org.apache.hadoop.hbase.security">User</a>&nbsp;user)</pre>
 <div class="block">Stops the regionserver.</div>
@@ -3402,7 +3402,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>waitForServerOnline</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2209">waitForServerOnline</a>()</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2214">waitForServerOnline</a>()</pre>
 </li>
 </ul>
 <a name="postOpenDeployTasks-org.apache.hadoop.hbase.regionserver.RegionServerServices.PostOpenDeployContext-">
@@ -3411,7 +3411,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>postOpenDeployTasks</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2223">postOpenDeployTasks</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.PostOpenDeployContext.html" title="class in org.apache.hadoop.hbase.regionserver">RegionServerServices.PostOpenDeployContext</a>&nbsp;context)
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2228">postOpenDeployTasks</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.PostOpenDeployContext.html" title="class in org.apache.hadoop.hbase.regionserver">RegionServerServices.PostOpenDeployContext</a>&nbsp;context)
                          throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.html#postOpenDeployTasks-org.apache.hadoop.hbase.regionserver.RegionServerServices.PostOpenDeployContext-">RegionServerServices</a></code></span></div>
 <div class="block">Tasks to perform after region open to complete deploy of region on regionserver</div>
@@ -3431,7 +3431,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>reportRegionStateTransition</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2255">reportRegionStateTransition</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.RegionStateTransitionContext.html" title="class in org.apache.hadoop.hbase.regionserver">RegionServerServices.RegionStateTransitionContext</a>&nbsp;context)</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2260">reportRegionStateTransition</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.RegionStateTransitionContext.html" title="class in org.apache.hadoop.hbase.regionserver">RegionServerServices.RegionStateTransitionContext</a>&nbsp;context)</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.html#reportRegionStateTransition-org.apache.hadoop.hbase.regionserver.RegionServerServices.RegionStateTransitionContext-">RegionServerServices</a></code></span></div>
 <div class="block">Notify master that a handler requests to change a region state</div>
 <dl>
@@ -3446,7 +3446,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>triggerFlushInPrimaryRegion</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2354">triggerFlushInPrimaryRegion</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion</a>&nbsp;region)</pre>
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2359">triggerFlushInPrimaryRegion</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion</a>&nbsp;region)</pre>
 <div class="block">Trigger a flush in the primary region replica if this region is a secondary replica. Does not
  block this thread. See RegionReplicaFlushHandler for details.</div>
 </li>
@@ -3457,7 +3457,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>getRpcServer</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ipc/RpcServerInterface.html" title="interface in org.apache.hadoop.hbase.ipc">RpcServerInterface</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2376">getRpcServer</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ipc/RpcServerInterface.html" title="interface in org.apache.hadoop.hbase.ipc">RpcServerInterface</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2381">getRpcServer</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.html#getRpcServer--">RegionServerServices</a></code></span></div>
 <div class="block">Returns a reference to the region server's RPC server</div>
 <dl>
@@ -3472,7 +3472,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>getRSRpcServices</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/RSRpcServices.html" title="class in org.apache.hadoop.hbase.regionserver">RSRpcServices</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2381">getRSRpcServices</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/RSRpcServices.html" title="class in org.apache.hadoop.hbase.regionserver">RSRpcServices</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2386">getRSRpcServices</a>()</pre>
 </li>
 </ul>
 <a name="abort-java.lang.String-java.lang.Throwable-">
@@ -3481,7 +3481,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>abort</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2396">abort</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;reason,
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2401">abort</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;reason,
                   <a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true" title="class or interface in java.lang">Throwable</a>&nbsp;cause)</pre>
 <div class="block">Cause the server to exit without closing the regions it is serving, the log
 it is using, and without notifying the master. Used in unit testing and on
@@ -3501,7 +3501,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>abort</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2439">abort</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;reason)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2444">abort</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;reason)</pre>
 <dl>
 <dt><span class="seeLabel">See Also:</span></dt>
 <dd><a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegionServer.html#abort-java.lang.String-java.lang.Throwable-"><code>abort(String, Throwable)</code></a></dd>
@@ -3514,7 +3514,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>isAborted</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2444">isAborted</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2449">isAborted</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/Abortable.html#isAborted--">Abortable</a></code></span></div>
 <div class="block">Check if the server or client was aborted.</div>
 <dl>
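 For reference, a bare-bones Abortable (hypothetical class name; abort and isAborted are the real interface methods) that records the flag checked here could look like:
 <pre>
 import org.apache.hadoop.hbase.Abortable;

 final class RecordingAbortable implements Abortable {
   private volatile boolean aborted = false;

   @Override
   public void abort(String why, Throwable cause) {
     aborted = true;
     System.err.println("Aborting: " + why);
     if (cause != null) {
       cause.printStackTrace();
     }
   }

   @Override
   public boolean isAborted() {
     return aborted;
   }
 }
 </pre>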
@@ -3531,7 +3531,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>kill</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2454">kill</a>()</pre>
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2459">kill</a>()</pre>
 </li>
 </ul>
 <a name="sendShutdownInterrupt--">
@@ -3540,7 +3540,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>sendShutdownInterrupt</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2462">sendShutdownInterrupt</a>()</pre>
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2467">sendShutdownInterrupt</a>()</pre>
 <div class="block">Called on stop/abort before closing the cluster connection and meta locator.</div>
 </li>
 </ul>
@@ -3550,7 +3550,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>stopServiceThreads</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2469">stopServiceThreads</a>()</pre>
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2474">stopServiceThreads</a>()</pre>
 <div class="block">Wait on all threads to finish. Presumption is that all closes and stops
  have already been called.</div>
 </li>
@@ -3561,7 +3561,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>getReplicationSourceService</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/ReplicationSourceService.html" title="interface in org.apache.hadoop.hbase.regionserver">ReplicationSourceService</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2515">getReplicationSourceService</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/ReplicationSourceService.html" title="interface in org.apache.hadoop.hbase.regionserver">ReplicationSourceService</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2520">getReplicationSourceService</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.html#getReplicationSourceService--">getReplicationSourceService</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/RegionServerServices.html" title="interface in org.apache.hadoop.hbase.regionserver">RegionServerServices</a></code></dd>
@@ -3577,7 +3577,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>getReplicationSinkService</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/ReplicationSinkService.html" title="interface in org.apache.hadoop.hbase.regionserver">ReplicationSinkService</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2522">getReplicationSinkService</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/regionserver/ReplicationSinkService.html" title="interface in org.apache.hadoop.hbase.regionserver">ReplicationSinkService</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2527">getReplicationSinkService</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>Return the object that implements the replication sink executorService.</dd>
@@ -3590,7 +3590,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>createRegionServerStatusStub</h4>
-<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2535">createRegionServerStatusStub</a>()</pre>
+<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2540">createRegionServerStatusStub</a>()</pre>
 <div class="block">Get the current master from ZooKeeper and open the RPC connection to it.
  To get a fresh connection, the current rssStub must be null.
  Method will block until a master is available. You can break from this
@@ -3607,7 +3607,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>createRegionServerStatusStub</h4>
-<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2548">createRegionServerStatusStub</a>(boolean&nbsp;refresh)</pre>
+<pre>protected&nbsp;<a href="../../../../../org/apache/hadoop/hbase/ServerName.html" title="class in org.apache.hadoop.hbase">ServerName</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2553">createRegionServerStatusStub</a>(boolean&nbsp;refresh)</pre>
 <div class="block">Get the current master from ZooKeeper and open the RPC connection to it. To get a fresh
  connection, the current rssStub must be null. Method will block until a master is available.
  You can break from this block by requesting the server stop.</div>
@@ -3625,7 +3625,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>keepLooping</h4>
-<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2620">keepLooping</a>()</pre>
+<pre>private&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2625">keepLooping</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>True if we should break loop because cluster is going down or
@@ -3639,7 +3639,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>reportForDuty</h4>
-<pre>private&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2631">reportForDuty</a>()
+<pre>private&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2636">reportForDuty</a>()
                                                                                                                       throws <a href="https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <dl>
 <dt><span class="throwsLabel">Throws:</span></dt>
@@ -3653,7 +3653,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>getLastSequenceId</h4>
-<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2672">getLastSequenceId</a>(byte[]&nbsp;encodedRegionName)</pre>
+<pre>public&nbsp;org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2677">getLastSequenceId</a>(byte[]&nbsp;encodedRegionName)</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/regionserver/LastSequenceId.html#getLastSequenceId-byte:A-">getLastSequenceId</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/LastSequenceId.html" title="interface in org.apache.hadoop.hbase.regionserver">LastSequenceId</a></code></dd>
@@ -3671,7 +3671,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>closeAllRegions</h4>
-<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2703">closeAllRegions</a>(boolean&nbsp;abort)</pre>
+<pre>protected&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2708">closeAllRegions</a>(boolean&nbsp;abort)</pre>
 <div class="block">Closes all regions.  Called on our way out.
 Assumes that it's not possible for new regions to be added to onlineRegions
  while this method runs.</div>
@@ -3683,7 +3683,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>closeMetaTableRegions</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2712">closeMetaTableRegions</a>(boolean&nbsp;abort)</pre>
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2717">closeMetaTableRegions</a>(boolean&nbsp;abort)</pre>
 <div class="block">Close meta region if we carry it</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -3697,7 +3697,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>closeUserRegions</h4>
-<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2735">closeUserRegions</a>(boolean&nbsp;abort)</pre>
+<pre>void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2740">closeUserRegions</a>(boolean&nbsp;abort)</pre>
 <div class="block">Schedule closes on all user regions.
 Should be safe to call multiple times because it won't close regions
  that are already closed or that are closing.</div>
@@ -3713,7 +3713,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>getInfoServer</h4>
-<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/http/InfoServer.html" title="class in org.apache.hadoop.hbase.http">InfoServer</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2751">getInfoServer</a>()</pre>
+<pre>public&nbsp;<a href="../../../../../org/apache/hadoop/hbase/http/InfoServer.html" title="class in org.apache.hadoop.hbase.http">InfoServer</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2756">getInfoServer</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>the info server</dd>
@@ -3726,7 +3726,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>isStopped</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2759">isStopped</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2764">isStopped</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/Stoppable.html#isStopped--">isStopped</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/Stoppable.html" title="interface in org.apache.hadoop.hbase">Stoppable</a></code></dd>
@@ -3741,7 +3741,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>isStopping</h4>
-<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2764">isStopping</a>()</pre>
+<pre>public&nbsp;boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2769">isStopping</a>()</pre>
 <dl>
 <dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
 <dd><code><a href="../../../../../org/apache/hadoop/hbase/Server.html#isStopping--">isStopping</a></code>&nbsp;in interface&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/Server.html" title="interface in org.apache.hadoop.hbase">Server</a></code></dd>
@@ -3756,7 +3756,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>getConfiguration</h4>
-<pre>public&nbsp;org.apache.hadoop.conf.Configuration&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2773">getConfiguration</a>()</pre>
+<pre>public&nbsp;org.apache.hadoop.conf.Configuration&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2778">getConfiguration</a>()</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/Server.html#getConfiguration--">Server</a></code></span></div>
 <div class="block">Gets the configuration object for this server.</div>
 <dl>
@@ -3773,7 +3773,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>getWriteLock</h4>
-<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/locks/ReentrantReadWriteLock.WriteLock.html?is-external=true" title="class or interface in java.util.concurrent.locks">ReentrantReadWriteLock.WriteLock</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2778">getWriteLock</a>()</pre>
+<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/locks/ReentrantReadWriteLock.WriteLock.html?is-external=true" title="class or interface in java.util.concurrent.locks">ReentrantReadWriteLock.WriteLock</a>&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2783">getWriteLock</a>()</pre>
 <dl>
 <dt><span class="returnLabel">Returns:</span></dt>
 <dd>the write lock for the server</dd>
@@ -3786,7 +3786,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>getNumberOfOnlineRegions</h4>
-<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2782">getNumberOfOnlineRegions</a>()</pre>
+<pre>public&nbsp;int&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2787">getNumberOfOnlineRegions</a>()</pre>
 </li>
 </ul>
 <a name="isOnlineRegionsEmpty--">
@@ -3795,7 +3795,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>isOnlineRegionsEmpty</h4>
-<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2786">isOnlineRegionsEmpty</a>()</pre>
+<pre>boolean&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2791">isOnlineRegionsEmpty</a>()</pre>
 </li>
 </ul>
 <a name="getOnlineRegionsLocalContext--">
@@ -3804,7 +3804,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>getOnlineRegionsLocalContext</h4>
-<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2795">getOnlineRegionsLocalContext</a>()</pre>
+<pre>public&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion</a>&gt;&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2800">getOnlineRegionsLocalContext</a>()</pre>
 <div class="block">For tests, web ui and metrics.
  This method will only work if HRegionServer is in the same JVM as client;
  HRegion cannot be serialized to cross an rpc.</div>
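 For example, a same-JVM test (say, against a mini cluster) could use it to count how many regions of a given table this server currently carries; the helper below is a hypothetical sketch:
 <pre>
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;

 final class OnlineRegionsExample {
   static int regionsOfTable(HRegionServer rs, TableName table) {
     int count = 0;
     for (HRegion region : rs.getOnlineRegionsLocalContext()) {
       if (region.getRegionInfo().getTable().equals(table)) {
         count++;
       }
     }
     return count;
   }
 }
 </pre>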
@@ -3816,7 +3816,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>addRegion</h4>
-<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2801">addRegion</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion</a>&nbsp;region)</pre>
+<pre>public&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2806">addRegion</a>(<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion</a>&nbsp;region)</pre>
 <div class="block"><span class="descfrmTypeLabel">Description copied from interface:&nbsp;<code><a href="../../../../../org/apache/hadoop/hbase/regionserver/MutableOnlineRegions.html#addRegion-org.apache.hadoop.hbase.regionserver.HRegion-">MutableOnlineRegions</a></code></span></div>
 <div class="block">Add to online regions.</div>
 <dl>
@@ -3831,7 +3831,7 @@ protected static final&nbsp;<a href="https://docs.oracle.com/javase/8/docs/api/j
 <ul class="blockList">
 <li class="blockList">
 <h4>addRegion</h4>
-<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2806">addRegion</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion</a>&gt;&gt;&nbsp;sortedRegions,
+<pre>private&nbsp;void&nbsp;<a href="../../../../../src-html/org/apache/hadoop/hbase/regionserver/HRegionServer.html#line.2811">addRegion</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/util/SortedMap.html?is-external=true" title="class or interface in java.util">SortedMap</a>&lt;<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true" title="class or interface in java.lang">Long</a>,<a href="https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true" title="class or interface in java.util">Collection</a>&lt;<a href="../../../../../org/apache/hadoop/hbase/regionserver/HRegion.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion</a>&gt;&gt;&nbsp;sortedRegions,
        

<TRUNCATED>